From 203dd358d7d277e0dd05c78dd02a7f793cbc3ed6 Mon Sep 17 00:00:00 2001 From: ANANDHU S <71482562+anandhu-eng@users.noreply.github.com> Date: Tue, 21 May 2024 02:09:05 +0530 Subject: [PATCH 1/7] Update customize.py --- script/get-cuda-devices/customize.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/script/get-cuda-devices/customize.py b/script/get-cuda-devices/customize.py index 8236b8029..c3b8e3c74 100644 --- a/script/get-cuda-devices/customize.py +++ b/script/get-cuda-devices/customize.py @@ -31,7 +31,9 @@ def postprocess(i): key_env = 'CM_CUDA_DEVICE_PROP_'+key.upper().replace(' ','_') env[key_env] = val + state['cm_cuda_device_prop'] = p - + print(env) return {'return':0} + From 43063ea8c12cdd511181b82febffa98799d09561 Mon Sep 17 00:00:00 2001 From: ANANDHU S <71482562+anandhu-eng@users.noreply.github.com> Date: Tue, 21 May 2024 02:39:48 +0530 Subject: [PATCH 2/7] Update customize.py --- script/get-cuda-devices/customize.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/script/get-cuda-devices/customize.py b/script/get-cuda-devices/customize.py index c3b8e3c74..54fa9094f 100644 --- a/script/get-cuda-devices/customize.py +++ b/script/get-cuda-devices/customize.py @@ -34,6 +34,6 @@ def postprocess(i): state['cm_cuda_device_prop'] = p - print(env) + return {'return':0} From 798d97a255b214d9d7ddf032f80d79efcb602a52 Mon Sep 17 00:00:00 2001 From: ANANDHU S <71482562+anandhu-eng@users.noreply.github.com> Date: Fri, 24 May 2024 21:37:59 +0530 Subject: [PATCH 3/7] Mkdocs facility added --- .github/workflows/publish.yaml | 33 + docs/AI-ML-datasets/get-croissant.md | 126 ++ docs/AI-ML-datasets/get-dataset-cifar10.md | 164 ++ docs/AI-ML-datasets/get-dataset-cnndm.md | 175 +++ docs/AI-ML-datasets/get-dataset-coco.md | 215 +++ docs/AI-ML-datasets/get-dataset-coco2014.md | 204 +++ docs/AI-ML-datasets/get-dataset-criteo.md | 154 ++ .../get-dataset-imagenet-aux.md | 155 ++ .../get-dataset-imagenet-calibration.md | 146 ++ 
.../get-dataset-imagenet-helper.md | 120 ++ .../get-dataset-imagenet-train.md | 149 ++ .../get-dataset-imagenet-val.md | 211 +++ docs/AI-ML-datasets/get-dataset-kits19.md | 172 +++ .../AI-ML-datasets/get-dataset-librispeech.md | 134 ++ .../get-dataset-openimages-annotations.md | 144 ++ .../get-dataset-openimages-calibration.md | 178 +++ docs/AI-ML-datasets/get-dataset-openimages.md | 250 ++++ docs/AI-ML-datasets/get-dataset-openorca.md | 173 +++ .../AI-ML-datasets/get-dataset-squad-vocab.md | 142 ++ docs/AI-ML-datasets/get-dataset-squad.md | 129 ++ .../get-preprocessed-dataset-criteo.md | 226 +++ .../get-preprocessed-dataset-generic.md | 117 ++ .../get-preprocessed-dataset-imagenet.md | 456 ++++++ .../get-preprocessed-dataset-kits19.md | 232 +++ .../get-preprocessed-dataset-librispeech.md | 222 +++ .../get-preprocessed-dataset-openimages.md | 401 +++++ .../get-preprocessed-dataset-openorca.md | 178 +++ .../get-preprocessed-dataset-squad.md | 238 +++ docs/AI-ML-frameworks/get-google-saxml.md | 133 ++ .../get-onnxruntime-prebuilt.md | 157 ++ docs/AI-ML-frameworks/get-qaic-apps-sdk.md | 124 ++ .../AI-ML-frameworks/get-qaic-platform-sdk.md | 128 ++ .../AI-ML-frameworks/get-qaic-software-kit.md | 176 +++ docs/AI-ML-frameworks/get-rocm.md | 126 ++ docs/AI-ML-frameworks/get-tvm.md | 198 +++ .../install-qaic-compute-sdk-from-src.md | 199 +++ docs/AI-ML-frameworks/install-rocm.md | 129 ++ .../install-tensorflow-for-c.md | 122 ++ .../install-tensorflow-from-src.md | 165 ++ .../install-tflite-from-src.md | 135 ++ .../convert-ml-model-huggingface-to-onnx.md | 143 ++ docs/AI-ML-models/get-bert-squad-vocab.md | 119 ++ docs/AI-ML-models/get-dlrm.md | 143 ++ .../get-ml-model-3d-unet-kits19.md | 200 +++ .../get-ml-model-bert-base-squad.md | 183 +++ .../get-ml-model-bert-large-squad.md | 357 +++++ .../get-ml-model-dlrm-terabyte.md | 262 ++++ .../get-ml-model-efficientnet-lite.md | 248 +++ docs/AI-ML-models/get-ml-model-gptj.md | 321 ++++ .../get-ml-model-huggingface-zoo.md | 192 +++ 
docs/AI-ML-models/get-ml-model-llama2.md | 222 +++ docs/AI-ML-models/get-ml-model-mobilenet.md | 470 ++++++ .../get-ml-model-neuralmagic-zoo.md | 335 +++++ docs/AI-ML-models/get-ml-model-resnet50.md | 356 +++++ .../get-ml-model-retinanet-nvidia.md | 172 +++ docs/AI-ML-models/get-ml-model-retinanet.md | 225 +++ docs/AI-ML-models/get-ml-model-rnnt.md | 192 +++ .../get-ml-model-stable-diffusion.md | 256 ++++ docs/AI-ML-models/get-ml-model-tiny-resnet.md | 213 +++ ...-ml-model-using-imagenet-from-model-zoo.md | 147 ++ docs/AI-ML-models/get-tvm-model.md | 288 ++++ .../calibrate-model-for.qaic.md | 289 ++++ .../compile-model-for.qaic.md | 438 ++++++ docs/AI-ML-optimization/prune-bert-models.md | 185 +++ .../test-mlperf-inference-retinanet.md | 135 ++ docs/CUDA-automation/get-cuda-devices.md | 122 ++ docs/CUDA-automation/get-cuda.md | 230 +++ docs/CUDA-automation/get-cudnn.md | 167 +++ docs/CUDA-automation/get-tensorrt.md | 176 +++ .../install-cuda-package-manager.md | 124 ++ docs/CUDA-automation/install-cuda-prebuilt.md | 180 +++ docs/Cloud-automation/destroy-terraform.md | 121 ++ docs/Cloud-automation/get-aws-cli.md | 125 ++ docs/Cloud-automation/get-terraform.md | 126 ++ docs/Cloud-automation/install-aws-cli.md | 123 ++ .../install-terraform-from-src.md | 130 ++ docs/Cloud-automation/run-terraform.md | 481 ++++++ .../launch-benchmark.md | 116 ++ docs/Compiler-automation/get-aocl.md | 137 ++ docs/Compiler-automation/get-cl.md | 138 ++ .../Compiler-automation/get-compiler-flags.md | 130 ++ docs/Compiler-automation/get-compiler-rust.md | 120 ++ docs/Compiler-automation/get-gcc.md | 154 ++ docs/Compiler-automation/get-go.md | 126 ++ docs/Compiler-automation/get-llvm.md | 175 +++ docs/Compiler-automation/install-gcc-src.md | 127 ++ .../install-ipex-from-src.md | 198 +++ .../install-llvm-prebuilt.md | 137 ++ docs/Compiler-automation/install-llvm-src.md | 292 ++++ .../install-onednn-from-src.md | 181 +++ .../install-onnxruntime-from-src.md | 184 +++ 
.../install-pytorch-from-src.md | 248 +++ .../install-pytorch-kineto-from-src.md | 191 +++ .../install-torchvision-from-src.md | 194 +++ .../install-tpp-pytorch-extension.md | 198 +++ .../install-transformers-from-src.md | 196 +++ .../publish-results-to-dashboard.md | 123 ++ .../get-android-sdk.md | 151 ++ .../get-aria2.md | 148 ++ .../get-bazel.md | 127 ++ .../get-blis.md | 158 ++ .../get-brew.md | 117 ++ .../get-cmake.md | 130 ++ .../get-cmsis_5.md | 149 ++ .../get-docker.md | 119 ++ .../get-generic-sys-util.md | 227 +++ .../get-google-test.md | 137 ++ .../get-java.md | 165 ++ .../get-javac.md | 168 +++ .../get-lib-armnn.md | 132 ++ .../get-lib-dnnl.md | 132 ++ .../get-lib-protobuf.md | 154 ++ .../get-lib-qaic-api.md | 131 ++ .../get-nvidia-docker.md | 121 ++ .../get-openssl.md | 125 ++ .../get-rclone.md | 150 ++ .../get-sys-utils-cm.md | 156 ++ .../get-sys-utils-min.md | 117 ++ .../get-xilinx-sdk.md | 138 ++ .../get-zendnn.md | 127 ++ .../install-bazel.md | 134 ++ .../install-cmake-prebuilt.md | 136 ++ .../install-gflags.md | 127 ++ .../install-github-cli.md | 121 ++ .../install-numactl-from-src.md | 170 +++ .../install-openssl.md | 134 ++ docs/DevOps-automation/benchmark-program.md | 151 ++ docs/DevOps-automation/compile-program.md | 128 ++ docs/DevOps-automation/convert-csv-to-md.md | 143 ++ docs/DevOps-automation/copy-to-clipboard.md | 141 ++ docs/DevOps-automation/create-conda-env.md | 148 ++ docs/DevOps-automation/create-patch.md | 135 ++ docs/DevOps-automation/detect-sudo.md | 120 ++ .../DevOps-automation/download-and-extract.md | 216 +++ docs/DevOps-automation/download-file.md | 202 +++ docs/DevOps-automation/download-torrent.md | 155 ++ docs/DevOps-automation/extract-file.md | 168 +++ docs/DevOps-automation/fail.md | 132 ++ docs/DevOps-automation/get-conda.md | 164 ++ docs/DevOps-automation/get-git-repo.md | 240 +++ docs/DevOps-automation/get-github-cli.md | 120 ++ docs/DevOps-automation/pull-git-repo.md | 134 ++ .../push-csv-to-spreadsheet.md | 142 ++ 
.../set-device-settings-qaic.md | 143 ++ docs/DevOps-automation/set-echo-off-win.md | 116 ++ .../DevOps-automation/set-performance-mode.md | 180 +++ docs/DevOps-automation/set-sqlite-dir.md | 141 ++ docs/DevOps-automation/tar-my-folder.md | 133 ++ docs/Docker-automation/build-docker-image.md | 160 ++ docs/Docker-automation/build-dockerfile.md | 186 +++ docs/Docker-automation/prune-docker.md | 118 ++ .../Docker-automation/run-docker-container.md | 166 ++ docs/GUI/gui.md | 243 +++ docs/Legacy-CK-support/get-ck-repo-mlops.md | 120 ++ docs/Legacy-CK-support/get-ck.md | 118 ++ .../add-custom-nvidia-system.md | 175 +++ ...ark-any-mlperf-inference-implementation.md | 268 ++++ .../build-mlperf-inference-server-nvidia.md | 248 +++ .../generate-mlperf-inference-submission.md | 191 +++ .../generate-mlperf-inference-user-conf.md | 199 +++ .../generate-mlperf-tiny-report.md | 145 ++ .../generate-mlperf-tiny-submission.md | 414 +++++ .../generate-nvidia-engine.md | 244 +++ ...et-mlperf-inference-intel-scratch-space.md | 161 ++ .../get-mlperf-inference-loadgen.md | 224 +++ ...get-mlperf-inference-nvidia-common-code.md | 150 ++ ...t-mlperf-inference-nvidia-scratch-space.md | 162 ++ .../get-mlperf-inference-results-dir.md | 159 ++ .../get-mlperf-inference-results.md | 163 ++ .../get-mlperf-inference-src.md | 266 ++++ .../get-mlperf-inference-submission-dir.md | 159 ++ .../get-mlperf-inference-sut-configs.md | 161 ++ .../get-mlperf-inference-sut-description.md | 159 ++ .../get-mlperf-logging.md | 127 ++ .../get-mlperf-power-dev.md | 171 +++ ...get-mlperf-tiny-eembc-energy-runner-src.md | 129 ++ .../get-mlperf-tiny-src.md | 143 ++ .../get-mlperf-training-nvidia-code.md | 158 ++ .../get-mlperf-training-src.md | 224 +++ .../get-nvidia-mitten.md | 132 ++ docs/MLPerf-benchmark-support/get-spec-ptd.md | 164 ++ .../import-mlperf-inference-to-experiment.md | 152 ++ .../import-mlperf-tiny-to-experiment.md | 135 ++ .../import-mlperf-training-to-experiment.md | 141 ++ 
.../install-mlperf-logging-from-src.md | 126 ++ .../prepare-training-data-bert.md | 193 +++ .../prepare-training-data-resnet.md | 206 +++ .../preprocess-mlperf-inference-submission.md | 144 ++ .../process-mlperf-accuracy.md | 334 +++++ ...push-mlperf-inference-results-to-github.md | 150 ++ .../run-all-mlperf-models.md | 237 +++ .../run-mlperf-inference-mobilenet-models.md | 383 +++++ ...run-mlperf-inference-submission-checker.md | 199 +++ .../run-mlperf-power-client.md | 154 ++ .../run-mlperf-power-server.md | 165 ++ .../run-mlperf-training-submission-checker.md | 181 +++ .../truncate-mlperf-inference-accuracy-log.md | 145 ++ .../app-image-classification-onnx-py.md | 213 +++ .../app-image-classification-tf-onnx-cpp.md | 133 ++ .../app-image-classification-torch-py.md | 170 +++ .../app-image-classification-tvm-onnx-py.md | 158 ++ .../app-stable-diffusion-onnx-py.md | 203 +++ .../app-mlperf-inference-dummy.md | 360 +++++ .../app-mlperf-inference-intel.md | 621 ++++++++ .../app-mlperf-inference-qualcomm.md | 775 ++++++++++ .../app-loadgen-generic-python.md | 331 ++++ ...app-mlperf-inference-ctuning-cpp-tflite.md | 382 +++++ .../app-mlperf-inference-mlcommons-cpp.md | 336 +++++ .../app-mlperf-inference-mlcommons-python.md | 944 ++++++++++++ .../app-mlperf-inference.md | 805 ++++++++++ .../benchmark-program-mlperf.md | 152 ++ .../run-mlperf-inference-app.md | 405 +++++ .../app-mlperf-training-nvidia.md | 242 +++ .../app-mlperf-training-reference.md | 240 +++ .../app-image-corner-detection.md | 129 ++ docs/Platform-information/detect-cpu.md | 128 ++ docs/Platform-information/detect-os.md | 138 ++ .../Python-automation/activate-python-venv.md | 121 ++ .../get-generic-python-lib.md | 681 +++++++++ docs/Python-automation/get-python3.md | 169 +++ .../install-generic-conda-package.md | 158 ++ docs/Python-automation/install-python-src.md | 182 +++ docs/Python-automation/install-python-venv.md | 152 ++ docs/Remote-automation/remote-run-commands.md | 145 ++ 
.../app-mlperf-inference-nvidia.md | 1333 +++++++++++++++++ .../reproduce-mlperf-octoml-tinyml-results.md | 214 +++ .../reproduce-mlperf-training-nvidia.md | 169 +++ ...pper-reproduce-octoml-tinyml-submission.md | 140 ++ .../get-ipol-src.md | 146 ++ .../process-ae-users.md | 136 ++ .../reproduce-ipol-paper-2022-439.md | 148 ++ .../reproduce-micro-paper-2023-victima.md | 179 +++ docs/Tests/print-croissant-desc.md | 144 ++ docs/Tests/print-hello-world-java.md | 123 ++ docs/Tests/print-hello-world-javac.md | 123 ++ docs/Tests/print-hello-world-py.md | 129 ++ docs/Tests/print-hello-world.md | 155 ++ docs/Tests/print-python-version.md | 121 ++ docs/Tests/run-python.md | 138 ++ docs/Tests/test-deps-conditions.md | 151 ++ .../test-download-and-extract-artifacts.md | 123 ++ docs/Tests/test-set-sys-user-cm.md | 118 ++ docs/Tests/upgrade-python-pip.md | 123 ++ .../create-fpgaconvnet-app-tinyml.md | 156 ++ .../create-fpgaconvnet-config-tinyml.md | 173 +++ docs/TinyML-automation/flash-tinyml-binary.md | 175 +++ docs/TinyML-automation/get-microtvm.md | 162 ++ docs/TinyML-automation/get-zephyr-sdk.md | 126 ++ docs/TinyML-automation/get-zephyr.md | 132 ++ docs/img/logo_v2.svg | 6 + docs/img/pages (80).png | Bin 0 -> 242952 bytes docs/index.md | 1 + docs/requirements.txt | 4 + mkdocs.yml | 327 ++++ mkdocsHelper.py | 87 ++ 255 files changed, 48837 insertions(+) create mode 100644 .github/workflows/publish.yaml create mode 100644 docs/AI-ML-datasets/get-croissant.md create mode 100644 docs/AI-ML-datasets/get-dataset-cifar10.md create mode 100644 docs/AI-ML-datasets/get-dataset-cnndm.md create mode 100644 docs/AI-ML-datasets/get-dataset-coco.md create mode 100644 docs/AI-ML-datasets/get-dataset-coco2014.md create mode 100644 docs/AI-ML-datasets/get-dataset-criteo.md create mode 100644 docs/AI-ML-datasets/get-dataset-imagenet-aux.md create mode 100644 docs/AI-ML-datasets/get-dataset-imagenet-calibration.md create mode 100644 docs/AI-ML-datasets/get-dataset-imagenet-helper.md create 
mode 100644 docs/AI-ML-datasets/get-dataset-imagenet-train.md create mode 100644 docs/AI-ML-datasets/get-dataset-imagenet-val.md create mode 100644 docs/AI-ML-datasets/get-dataset-kits19.md create mode 100644 docs/AI-ML-datasets/get-dataset-librispeech.md create mode 100644 docs/AI-ML-datasets/get-dataset-openimages-annotations.md create mode 100644 docs/AI-ML-datasets/get-dataset-openimages-calibration.md create mode 100644 docs/AI-ML-datasets/get-dataset-openimages.md create mode 100644 docs/AI-ML-datasets/get-dataset-openorca.md create mode 100644 docs/AI-ML-datasets/get-dataset-squad-vocab.md create mode 100644 docs/AI-ML-datasets/get-dataset-squad.md create mode 100644 docs/AI-ML-datasets/get-preprocessed-dataset-criteo.md create mode 100644 docs/AI-ML-datasets/get-preprocessed-dataset-generic.md create mode 100644 docs/AI-ML-datasets/get-preprocessed-dataset-imagenet.md create mode 100644 docs/AI-ML-datasets/get-preprocessed-dataset-kits19.md create mode 100644 docs/AI-ML-datasets/get-preprocessed-dataset-librispeech.md create mode 100644 docs/AI-ML-datasets/get-preprocessed-dataset-openimages.md create mode 100644 docs/AI-ML-datasets/get-preprocessed-dataset-openorca.md create mode 100644 docs/AI-ML-datasets/get-preprocessed-dataset-squad.md create mode 100644 docs/AI-ML-frameworks/get-google-saxml.md create mode 100644 docs/AI-ML-frameworks/get-onnxruntime-prebuilt.md create mode 100644 docs/AI-ML-frameworks/get-qaic-apps-sdk.md create mode 100644 docs/AI-ML-frameworks/get-qaic-platform-sdk.md create mode 100644 docs/AI-ML-frameworks/get-qaic-software-kit.md create mode 100644 docs/AI-ML-frameworks/get-rocm.md create mode 100644 docs/AI-ML-frameworks/get-tvm.md create mode 100644 docs/AI-ML-frameworks/install-qaic-compute-sdk-from-src.md create mode 100644 docs/AI-ML-frameworks/install-rocm.md create mode 100644 docs/AI-ML-frameworks/install-tensorflow-for-c.md create mode 100644 docs/AI-ML-frameworks/install-tensorflow-from-src.md create mode 100644 
docs/AI-ML-frameworks/install-tflite-from-src.md create mode 100644 docs/AI-ML-models/convert-ml-model-huggingface-to-onnx.md create mode 100644 docs/AI-ML-models/get-bert-squad-vocab.md create mode 100644 docs/AI-ML-models/get-dlrm.md create mode 100644 docs/AI-ML-models/get-ml-model-3d-unet-kits19.md create mode 100644 docs/AI-ML-models/get-ml-model-bert-base-squad.md create mode 100644 docs/AI-ML-models/get-ml-model-bert-large-squad.md create mode 100644 docs/AI-ML-models/get-ml-model-dlrm-terabyte.md create mode 100644 docs/AI-ML-models/get-ml-model-efficientnet-lite.md create mode 100644 docs/AI-ML-models/get-ml-model-gptj.md create mode 100644 docs/AI-ML-models/get-ml-model-huggingface-zoo.md create mode 100644 docs/AI-ML-models/get-ml-model-llama2.md create mode 100644 docs/AI-ML-models/get-ml-model-mobilenet.md create mode 100644 docs/AI-ML-models/get-ml-model-neuralmagic-zoo.md create mode 100644 docs/AI-ML-models/get-ml-model-resnet50.md create mode 100644 docs/AI-ML-models/get-ml-model-retinanet-nvidia.md create mode 100644 docs/AI-ML-models/get-ml-model-retinanet.md create mode 100644 docs/AI-ML-models/get-ml-model-rnnt.md create mode 100644 docs/AI-ML-models/get-ml-model-stable-diffusion.md create mode 100644 docs/AI-ML-models/get-ml-model-tiny-resnet.md create mode 100644 docs/AI-ML-models/get-ml-model-using-imagenet-from-model-zoo.md create mode 100644 docs/AI-ML-models/get-tvm-model.md create mode 100644 docs/AI-ML-optimization/calibrate-model-for.qaic.md create mode 100644 docs/AI-ML-optimization/compile-model-for.qaic.md create mode 100644 docs/AI-ML-optimization/prune-bert-models.md create mode 100644 docs/CM-interface-prototyping/test-mlperf-inference-retinanet.md create mode 100644 docs/CUDA-automation/get-cuda-devices.md create mode 100644 docs/CUDA-automation/get-cuda.md create mode 100644 docs/CUDA-automation/get-cudnn.md create mode 100644 docs/CUDA-automation/get-tensorrt.md create mode 100644 
docs/CUDA-automation/install-cuda-package-manager.md create mode 100644 docs/CUDA-automation/install-cuda-prebuilt.md create mode 100644 docs/Cloud-automation/destroy-terraform.md create mode 100644 docs/Cloud-automation/get-aws-cli.md create mode 100644 docs/Cloud-automation/get-terraform.md create mode 100644 docs/Cloud-automation/install-aws-cli.md create mode 100644 docs/Cloud-automation/install-terraform-from-src.md create mode 100644 docs/Cloud-automation/run-terraform.md create mode 100644 docs/Collective-benchmarking/launch-benchmark.md create mode 100644 docs/Compiler-automation/get-aocl.md create mode 100644 docs/Compiler-automation/get-cl.md create mode 100644 docs/Compiler-automation/get-compiler-flags.md create mode 100644 docs/Compiler-automation/get-compiler-rust.md create mode 100644 docs/Compiler-automation/get-gcc.md create mode 100644 docs/Compiler-automation/get-go.md create mode 100644 docs/Compiler-automation/get-llvm.md create mode 100644 docs/Compiler-automation/install-gcc-src.md create mode 100644 docs/Compiler-automation/install-ipex-from-src.md create mode 100644 docs/Compiler-automation/install-llvm-prebuilt.md create mode 100644 docs/Compiler-automation/install-llvm-src.md create mode 100644 docs/Compiler-automation/install-onednn-from-src.md create mode 100644 docs/Compiler-automation/install-onnxruntime-from-src.md create mode 100644 docs/Compiler-automation/install-pytorch-from-src.md create mode 100644 docs/Compiler-automation/install-pytorch-kineto-from-src.md create mode 100644 docs/Compiler-automation/install-torchvision-from-src.md create mode 100644 docs/Compiler-automation/install-tpp-pytorch-extension.md create mode 100644 docs/Compiler-automation/install-transformers-from-src.md create mode 100644 docs/Dashboard-automation/publish-results-to-dashboard.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-android-sdk.md create mode 100644 
docs/Detection-or-installation-of-tools-and-artifacts/get-aria2.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-bazel.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-blis.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-brew.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-cmake.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-cmsis_5.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-docker.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-generic-sys-util.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-google-test.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-java.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-javac.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-lib-armnn.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-lib-dnnl.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-lib-protobuf.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-lib-qaic-api.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-nvidia-docker.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-openssl.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-rclone.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-cm.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-min.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-xilinx-sdk.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-zendnn.md create mode 100644 
docs/Detection-or-installation-of-tools-and-artifacts/install-bazel.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/install-cmake-prebuilt.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/install-gflags.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/install-github-cli.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/install-numactl-from-src.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/install-openssl.md create mode 100644 docs/DevOps-automation/benchmark-program.md create mode 100644 docs/DevOps-automation/compile-program.md create mode 100644 docs/DevOps-automation/convert-csv-to-md.md create mode 100644 docs/DevOps-automation/copy-to-clipboard.md create mode 100644 docs/DevOps-automation/create-conda-env.md create mode 100644 docs/DevOps-automation/create-patch.md create mode 100644 docs/DevOps-automation/detect-sudo.md create mode 100644 docs/DevOps-automation/download-and-extract.md create mode 100644 docs/DevOps-automation/download-file.md create mode 100644 docs/DevOps-automation/download-torrent.md create mode 100644 docs/DevOps-automation/extract-file.md create mode 100644 docs/DevOps-automation/fail.md create mode 100644 docs/DevOps-automation/get-conda.md create mode 100644 docs/DevOps-automation/get-git-repo.md create mode 100644 docs/DevOps-automation/get-github-cli.md create mode 100644 docs/DevOps-automation/pull-git-repo.md create mode 100644 docs/DevOps-automation/push-csv-to-spreadsheet.md create mode 100644 docs/DevOps-automation/set-device-settings-qaic.md create mode 100644 docs/DevOps-automation/set-echo-off-win.md create mode 100644 docs/DevOps-automation/set-performance-mode.md create mode 100644 docs/DevOps-automation/set-sqlite-dir.md create mode 100644 docs/DevOps-automation/tar-my-folder.md create mode 100644 docs/Docker-automation/build-docker-image.md create mode 100644 
docs/Docker-automation/build-dockerfile.md create mode 100644 docs/Docker-automation/prune-docker.md create mode 100644 docs/Docker-automation/run-docker-container.md create mode 100644 docs/GUI/gui.md create mode 100644 docs/Legacy-CK-support/get-ck-repo-mlops.md create mode 100644 docs/Legacy-CK-support/get-ck.md create mode 100644 docs/MLPerf-benchmark-support/add-custom-nvidia-system.md create mode 100644 docs/MLPerf-benchmark-support/benchmark-any-mlperf-inference-implementation.md create mode 100644 docs/MLPerf-benchmark-support/build-mlperf-inference-server-nvidia.md create mode 100644 docs/MLPerf-benchmark-support/generate-mlperf-inference-submission.md create mode 100644 docs/MLPerf-benchmark-support/generate-mlperf-inference-user-conf.md create mode 100644 docs/MLPerf-benchmark-support/generate-mlperf-tiny-report.md create mode 100644 docs/MLPerf-benchmark-support/generate-mlperf-tiny-submission.md create mode 100644 docs/MLPerf-benchmark-support/generate-nvidia-engine.md create mode 100644 docs/MLPerf-benchmark-support/get-mlperf-inference-intel-scratch-space.md create mode 100644 docs/MLPerf-benchmark-support/get-mlperf-inference-loadgen.md create mode 100644 docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-common-code.md create mode 100644 docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-scratch-space.md create mode 100644 docs/MLPerf-benchmark-support/get-mlperf-inference-results-dir.md create mode 100644 docs/MLPerf-benchmark-support/get-mlperf-inference-results.md create mode 100644 docs/MLPerf-benchmark-support/get-mlperf-inference-src.md create mode 100644 docs/MLPerf-benchmark-support/get-mlperf-inference-submission-dir.md create mode 100644 docs/MLPerf-benchmark-support/get-mlperf-inference-sut-configs.md create mode 100644 docs/MLPerf-benchmark-support/get-mlperf-inference-sut-description.md create mode 100644 docs/MLPerf-benchmark-support/get-mlperf-logging.md create mode 100644 
docs/MLPerf-benchmark-support/get-mlperf-power-dev.md create mode 100644 docs/MLPerf-benchmark-support/get-mlperf-tiny-eembc-energy-runner-src.md create mode 100644 docs/MLPerf-benchmark-support/get-mlperf-tiny-src.md create mode 100644 docs/MLPerf-benchmark-support/get-mlperf-training-nvidia-code.md create mode 100644 docs/MLPerf-benchmark-support/get-mlperf-training-src.md create mode 100644 docs/MLPerf-benchmark-support/get-nvidia-mitten.md create mode 100644 docs/MLPerf-benchmark-support/get-spec-ptd.md create mode 100644 docs/MLPerf-benchmark-support/import-mlperf-inference-to-experiment.md create mode 100644 docs/MLPerf-benchmark-support/import-mlperf-tiny-to-experiment.md create mode 100644 docs/MLPerf-benchmark-support/import-mlperf-training-to-experiment.md create mode 100644 docs/MLPerf-benchmark-support/install-mlperf-logging-from-src.md create mode 100644 docs/MLPerf-benchmark-support/prepare-training-data-bert.md create mode 100644 docs/MLPerf-benchmark-support/prepare-training-data-resnet.md create mode 100644 docs/MLPerf-benchmark-support/preprocess-mlperf-inference-submission.md create mode 100644 docs/MLPerf-benchmark-support/process-mlperf-accuracy.md create mode 100644 docs/MLPerf-benchmark-support/push-mlperf-inference-results-to-github.md create mode 100644 docs/MLPerf-benchmark-support/run-all-mlperf-models.md create mode 100644 docs/MLPerf-benchmark-support/run-mlperf-inference-mobilenet-models.md create mode 100644 docs/MLPerf-benchmark-support/run-mlperf-inference-submission-checker.md create mode 100644 docs/MLPerf-benchmark-support/run-mlperf-power-client.md create mode 100644 docs/MLPerf-benchmark-support/run-mlperf-power-server.md create mode 100644 docs/MLPerf-benchmark-support/run-mlperf-training-submission-checker.md create mode 100644 docs/MLPerf-benchmark-support/truncate-mlperf-inference-accuracy-log.md create mode 100644 docs/Modular-AI-ML-application-pipeline/app-image-classification-onnx-py.md create mode 100644 
docs/Modular-AI-ML-application-pipeline/app-image-classification-tf-onnx-cpp.md create mode 100644 docs/Modular-AI-ML-application-pipeline/app-image-classification-torch-py.md create mode 100644 docs/Modular-AI-ML-application-pipeline/app-image-classification-tvm-onnx-py.md create mode 100644 docs/Modular-AI-ML-application-pipeline/app-stable-diffusion-onnx-py.md create mode 100644 docs/Modular-MLPerf-benchmarks/app-mlperf-inference-dummy.md create mode 100644 docs/Modular-MLPerf-benchmarks/app-mlperf-inference-intel.md create mode 100644 docs/Modular-MLPerf-benchmarks/app-mlperf-inference-qualcomm.md create mode 100644 docs/Modular-MLPerf-inference-benchmark-pipeline/app-loadgen-generic-python.md create mode 100644 docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-ctuning-cpp-tflite.md create mode 100644 docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-cpp.md create mode 100644 docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-python.md create mode 100644 docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference.md create mode 100644 docs/Modular-MLPerf-inference-benchmark-pipeline/benchmark-program-mlperf.md create mode 100644 docs/Modular-MLPerf-inference-benchmark-pipeline/run-mlperf-inference-app.md create mode 100644 docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-nvidia.md create mode 100644 docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-reference.md create mode 100644 docs/Modular-application-pipeline/app-image-corner-detection.md create mode 100644 docs/Platform-information/detect-cpu.md create mode 100644 docs/Platform-information/detect-os.md create mode 100644 docs/Python-automation/activate-python-venv.md create mode 100644 docs/Python-automation/get-generic-python-lib.md create mode 100644 docs/Python-automation/get-python3.md create mode 100644 docs/Python-automation/install-generic-conda-package.md create mode 100644 
docs/Python-automation/install-python-src.md create mode 100644 docs/Python-automation/install-python-venv.md create mode 100644 docs/Remote-automation/remote-run-commands.md create mode 100644 docs/Reproduce-MLPerf-benchmarks/app-mlperf-inference-nvidia.md create mode 100644 docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-octoml-tinyml-results.md create mode 100644 docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-training-nvidia.md create mode 100644 docs/Reproduce-MLPerf-benchmarks/wrapper-reproduce-octoml-tinyml-submission.md create mode 100644 docs/Reproducibility-and-artifact-evaluation/get-ipol-src.md create mode 100644 docs/Reproducibility-and-artifact-evaluation/process-ae-users.md create mode 100644 docs/Reproducibility-and-artifact-evaluation/reproduce-ipol-paper-2022-439.md create mode 100644 docs/Reproducibility-and-artifact-evaluation/reproduce-micro-paper-2023-victima.md create mode 100644 docs/Tests/print-croissant-desc.md create mode 100644 docs/Tests/print-hello-world-java.md create mode 100644 docs/Tests/print-hello-world-javac.md create mode 100644 docs/Tests/print-hello-world-py.md create mode 100644 docs/Tests/print-hello-world.md create mode 100644 docs/Tests/print-python-version.md create mode 100644 docs/Tests/run-python.md create mode 100644 docs/Tests/test-deps-conditions.md create mode 100644 docs/Tests/test-download-and-extract-artifacts.md create mode 100644 docs/Tests/test-set-sys-user-cm.md create mode 100644 docs/Tests/upgrade-python-pip.md create mode 100644 docs/TinyML-automation/create-fpgaconvnet-app-tinyml.md create mode 100644 docs/TinyML-automation/create-fpgaconvnet-config-tinyml.md create mode 100644 docs/TinyML-automation/flash-tinyml-binary.md create mode 100644 docs/TinyML-automation/get-microtvm.md create mode 100644 docs/TinyML-automation/get-zephyr-sdk.md create mode 100644 docs/TinyML-automation/get-zephyr.md create mode 100644 docs/img/logo_v2.svg create mode 100644 docs/img/pages (80).png create mode 100644 
docs/index.md create mode 100644 docs/requirements.txt create mode 100644 mkdocs.yml create mode 100644 mkdocsHelper.py diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml new file mode 100644 index 000000000..5df31b971 --- /dev/null +++ b/.github/workflows/publish.yaml @@ -0,0 +1,33 @@ +# This is a basic workflow to help you get started with Actions + +name: Publish site + + +on: + release: + types: [published] + push: + branches: + - main + - docs + +jobs: + + publish: + name: Publish the site + runs-on: ubuntu-latest + + steps: + - name: Checkout repository normally + uses: actions/checkout@v3 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.11" + + - name: Install Mkdocs + run: pip install -r docs/requirements.txt + + - name: Run Mkdocs deploy + run: mkdocs gh-deploy --force diff --git a/docs/AI-ML-datasets/get-croissant.md b/docs/AI-ML-datasets/get-croissant.md new file mode 100644 index 000000000..3c62b3bc5 --- /dev/null +++ b/docs/AI-ML-datasets/get-croissant.md @@ -0,0 +1,126 @@ +Automatically generated README for this automation recipe: **get-croissant** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-croissant,8fd653eac8da4c14) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *get,mlcommons,croissant* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get mlcommons croissant" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,mlcommons,croissant` + +`cm run script --tags=get,mlcommons,croissant ` + +*or* + +`cmr "get mlcommons croissant"` + +`cmr "get mlcommons croissant " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,mlcommons,croissant', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,mlcommons,croissant"``` + +#### Run this script via Docker (beta) + +`cm docker script "get mlcommons croissant" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * git,repo,_repo.https://github.com/mlcommons/croissant + * CM names: `--adr.['git-mlcommons-croissant']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant/_cm.yaml) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant/_cm.yaml) + +___ +### Script output +`cmr "get mlcommons croissant " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-datasets/get-dataset-cifar10.md b/docs/AI-ML-datasets/get-dataset-cifar10.md new file mode 100644 index 000000000..e6caa091c --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-cifar10.md @@ -0,0 +1,164 @@ +Automatically generated README for this automation recipe: **get-dataset-cifar10** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-cifar10,2f0c0bb3663b4ed7) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,cifar10,image-classification,validation,training* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset cifar10 image-classification validation training" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,cifar10,image-classification,validation,training` + +`cm run script --tags=get,dataset,cifar10,image-classification,validation,training[,variations] ` + +*or* + +`cmr "get dataset cifar10 image-classification validation training"` + +`cmr "get dataset cifar10 image-classification validation training [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,cifar10,image-classification,validation,training', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,cifar10,image-classification,validation,training"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset cifar10 image-classification validation training[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_tiny` + - Environment variables: + - *CM_DATASET_CONVERT_TO_TINYMLPERF*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,tinymlperf,src + - CM script: [get-mlperf-tiny-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-tiny-src) + * get,src,eembc,energy-runner + - CM script: [get-mlperf-tiny-eembc-energy-runner-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-tiny-eembc-energy-runner-src) + +
+ + + * Group "**data_format**" +
+ Click here to expand this section. + + * **`_python`** (default) + - Environment variables: + - *CM_DATASET*: `CIFAR10` + - *CM_DATASET_FILENAME*: `cifar-10-python.tar.gz` + - *CM_DATASET_FILENAME1*: `cifar-10-python.tar` + - *CM_DATASET_CIFAR10*: `https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz` + - Workflow: + +
+ + +#### Default variations + +`_python` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10/_cm.json) + +___ +### Script output +`cmr "get dataset cifar10 image-classification validation training [,variations]" -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-datasets/get-dataset-cnndm.md b/docs/AI-ML-datasets/get-dataset-cnndm.md new file mode 100644 index 000000000..85be98b6a --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-cnndm.md @@ -0,0 +1,175 @@ +Automatically generated README for this automation recipe: **get-dataset-cnndm** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-cnndm,aed298c156e24257) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,gpt-j,cnndm,cnn-dailymail,original* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset gpt-j cnndm cnn-dailymail original" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,gpt-j,cnndm,cnn-dailymail,original` + +`cm run script --tags=get,dataset,gpt-j,cnndm,cnn-dailymail,original[,variations] ` + +*or* + +`cmr "get dataset gpt-j cnndm cnn-dailymail original"` + +`cmr "get dataset gpt-j cnndm cnn-dailymail original [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,gpt-j,cnndm,cnn-dailymail,original', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,gpt-j,cnndm,cnn-dailymail,original"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset gpt-j cnndm cnn-dailymail original[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_intel` + - Workflow: + * `_intel,validation` + - Environment variables: + - *CM_CNNDM_INTEL_VARIATION*: `yes` + - Workflow: + +
+ + + * Group "**dataset-type**" +
+ Click here to expand this section. + + * `_calibration` + - Environment variables: + - *CM_DATASET_CALIBRATION*: `yes` + - Workflow: + * **`_validation`** (default) + - Environment variables: + - *CM_DATASET_CALIBRATION*: `no` + - Workflow: + +
+ + +#### Default variations + +`_validation` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DATASET_CALIBRATION: `no` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm/_cm.json)*** + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * mlperf,inference,source + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_CNNDM_INTEL_VARIATION': ['yes']}` + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,generic-python-lib,_package.simplejson + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_datasets + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.tokenizers + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm/_cm.json) + 1. ***Run native script if exists*** + * [run-intel.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm/run-intel.sh) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm/_cm.json) + +___ +### Script output +`cmr "get dataset gpt-j cnndm cnn-dailymail original [,variations]" -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-datasets/get-dataset-coco.md b/docs/AI-ML-datasets/get-dataset-coco.md new file mode 100644 index 000000000..33aded32e --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-coco.md @@ -0,0 +1,215 @@ +Automatically generated README for this automation recipe: **get-dataset-coco** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-coco,c198e1f60ac6445c) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,object-detection,coco* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset object-detection coco" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,object-detection,coco` + +`cm run script --tags=get,dataset,object-detection,coco[,variations] [--input_flags]` + +*or* + +`cmr "get dataset object-detection coco"` + +`cmr "get dataset object-detection coco [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,object-detection,coco', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,object-detection,coco"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset object-detection coco[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**size**" +
+ Click here to expand this section. + + * **`_complete`** (default) + - Environment variables: + - *CM_DATASET_COCO_SIZE*: `complete` + - Workflow: + * `_small` + - Environment variables: + - *CM_DATASET_COCO_SIZE*: `small` + - Workflow: + +
+ + + * Group "**type**" +
+ Click here to expand this section. + + * `_train` + - Environment variables: + - *CM_DATASET_COCO_TYPE*: `train` + - Workflow: + * **`_val`** (default) + - Environment variables: + - *CM_DATASET_COCO_TYPE*: `val` + - Workflow: + +
+ + + * Group "**version**" +
+ Click here to expand this section. + + * **`_2017`** (default) + - Environment variables: + - *CM_DATASET_COCO_VERSION*: `2017` + - Workflow: + +
+ + +#### Default variations + +`_2017,_complete,_val` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--from=value` → `CM_FROM=value` +* `--home=value` → `CM_HOME_DIR=value` +* `--store=value` → `CM_STORE=value` +* `--to=value` → `CM_TO=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "from":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco/_cm.json)*** + * download-and-extract,file,_wget,_extract + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_DATASET_COCO_DETECTED': ['yes']}` + * CM names: `--adr.['get-dataset-coco-data', '746e5dad5e784ad6']...` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + * download-and-extract,file,_wget,_extract + * Skip this dependency only if all ENV vars are set:
+`{'CM_DATASET_COCO_DETECTED': ['yes']}` + * CM names: `--adr.['get-dataset-coco-annotations', 'edb6cd092ff64171']...` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco/_cm.json) + +___ +### Script output +`cmr "get dataset object-detection coco [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_COCO*` +* `CM_DATASET_PATH` +* `CM_DATASET_PATH_ROOT` +#### New environment keys auto-detected from customize + +* `CM_DATASET_COCO_ANNOTATIONS_PATH` +* `CM_DATASET_COCO_DATA_PATH` +* `CM_DATASET_COCO_DETECTED` +* `CM_DATASET_COCO_MD5SUM_ANN` +* `CM_DATASET_COCO_MD5SUM_DATA` +* `CM_DATASET_COCO_PATH` +* `CM_DATASET_COCO_TYPE` +* `CM_DATASET_COCO_TYPE_AND_VERSION` +* `CM_DATASET_COCO_URL_ANNOTATIONS_FULL` +* `CM_DATASET_COCO_URL_DATA_FULL` +* `CM_DATASET_COCO_VERSION` +* `CM_DATASET_PATH` +* `CM_DATASET_PATH_ROOT` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-coco2014.md b/docs/AI-ML-datasets/get-dataset-coco2014.md new file mode 100644 index 000000000..e13dc04fe --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-coco2014.md @@ -0,0 +1,204 @@ +Automatically generated README for this automation recipe: **get-dataset-coco2014** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-coco2014,3f7ad9d42f4040f8) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,coco2014,object-detection,original* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset coco2014 object-detection original" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,coco2014,object-detection,original` + +`cm run script --tags=get,dataset,coco2014,object-detection,original[,variations] ` + +*or* + +`cmr "get dataset coco2014 object-detection original"` + +`cmr "get dataset coco2014 object-detection original [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,coco2014,object-detection,original', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,coco2014,object-detection,original"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset coco2014 object-detection original[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**annotations**" +
+ Click here to expand this section. + + * `_custom-annotations` + - Environment variables: + - *CM_DATASET_COCO2014_CUSTOM_ANNOTATIONS*: `yes` + - Workflow: + * **`_default-annotations`** (default) + - Environment variables: + - *CM_DATASET_COCO2014_CUSTOM_ANNOTATIONS*: `no` + - Workflow: + +
+ + + * Group "**dataset-type**" +
+ Click here to expand this section. + + * `_calibration` + - Environment variables: + - *CM_DATASET_CALIBRATION*: `yes` + - Workflow: + * **`_validation`** (default) + - Environment variables: + - *CM_DATASET_CALIBRATION*: `no` + - Workflow: + +
+ + + * Group "**size**" +
+ Click here to expand this section. + + * **`_50`** (default) + - Environment variables: + - *CM_DATASET_SIZE*: `50` + - Workflow: + * `_500` + - Environment variables: + - *CM_DATASET_SIZE*: `500` + - Workflow: + * `_full` + - Environment variables: + - *CM_DATASET_SIZE*: `` + - Workflow: + * `_size.#` + - Environment variables: + - *CM_DATASET_SIZE*: `#` + - Workflow: + +
+ + +#### Default variations + +`_50,_default-annotations,_validation` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DATASET_CALIBRATION: `no` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014/_cm.yaml)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_package.tqdm + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * mlperf,inference,source + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014/_cm.yaml) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014/run.sh) + 1. ***Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014/_cm.yaml)*** + * get,coco2014,annotations + * Enable this dependency only if all ENV vars are set:
+`{'CM_DATASET_COCO2014_CUSTOM_ANNOTATIONS': ['yes']}` + - *Warning: no scripts found* + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014/_cm.yaml) + +___ +### Script output +`cmr "get dataset coco2014 object-detection original [,variations]" -j` +#### New environment keys (filter) + +* `CM_CALIBRATION_DATASET_PATH` +* `CM_DATASET_ANNOTATIONS_DIR_PATH` +* `CM_DATASET_ANNOTATIONS_FILE_PATH` +* `CM_DATASET_PATH` +* `CM_DATASET_PATH_ROOT` +#### New environment keys auto-detected from customize + +* `CM_CALIBRATION_DATASET_PATH` +* `CM_DATASET_PATH` +* `CM_DATASET_PATH_ROOT` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-criteo.md b/docs/AI-ML-datasets/get-dataset-criteo.md new file mode 100644 index 000000000..5f2b29d83 --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-criteo.md @@ -0,0 +1,154 @@ +Automatically generated README for this automation recipe: **get-dataset-criteo** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-criteo,194a47d908714897) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-criteo)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,criteo,original* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset criteo original" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,criteo,original` + +`cm run script --tags=get,dataset,criteo,original[,variations] [--input_flags]` + +*or* + +`cmr "get dataset criteo original"` + +`cmr "get dataset criteo original [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,criteo,original', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,criteo,original"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset criteo original[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_backup` + - Environment variables: + - *CM_BACKUP_ZIPS*: `yes` + - Workflow: + * `_fake` + - Environment variables: + - *CM_CRITEO_FAKE*: `yes` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--criteo_path=value` → `CM_CRITEO_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "criteo_path":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_BACKUP_ZIPS: `no` + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-criteo/_cm.json) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-criteo/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-criteo/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-criteo/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-criteo/_cm.json) + +___ +### Script output +`cmr "get dataset criteo original [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET*` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-datasets/get-dataset-imagenet-aux.md b/docs/AI-ML-datasets/get-dataset-imagenet-aux.md new file mode 100644 index 000000000..e5d3a126c --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-imagenet-aux.md @@ -0,0 +1,155 @@ +Automatically generated README for this automation recipe: **get-dataset-imagenet-aux** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-imagenet-aux,bb2c6dd8c8c64217) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-aux)* +* CM meta 
description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,aux,dataset-aux,image-classification,imagenet-aux* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get aux dataset-aux image-classification imagenet-aux" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,aux,dataset-aux,image-classification,imagenet-aux` + +`cm run script --tags=get,aux,dataset-aux,image-classification,imagenet-aux[,variations] ` + +*or* + +`cmr "get aux dataset-aux image-classification imagenet-aux"` + +`cmr "get aux dataset-aux image-classification imagenet-aux [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,aux,dataset-aux,image-classification,imagenet-aux', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,aux,dataset-aux,image-classification,imagenet-aux"``` + +#### Run this script via Docker (beta) + +`cm docker script "get aux dataset-aux image-classification imagenet-aux[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_2012` + - Environment variables: + - *CM_DATASET_AUX_VER*: `2012` + - Workflow: + +
+ + + * Group "**download-source**" +
+ Click here to expand this section. + + * `_from.berkeleyvision` + - Environment variables: + - *CM_WGET_URL*: `http://dl.caffe.berkeleyvision.org/caffe_ilsvrc12.tar.gz` + - Workflow: + * **`_from.dropbox`** (default) + - Environment variables: + - *CM_WGET_URL*: `https://www.dropbox.com/s/92n2fyej3lzy3s3/caffe_ilsvrc12.tar.gz` + - Workflow: + +
+ + +#### Default variations + +`_from.dropbox` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-aux/_cm.json) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-aux/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-aux/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-aux/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-aux/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-aux/_cm.json) + +___ +### Script output +`cmr "get aux dataset-aux image-classification imagenet-aux [,variations]" -j` +#### New environment keys (filter) + +* `CM_DATASET_AUX_*` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-datasets/get-dataset-imagenet-calibration.md b/docs/AI-ML-datasets/get-dataset-imagenet-calibration.md new file mode 100644 index 000000000..76ae3ca52 --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-imagenet-calibration.md @@ -0,0 +1,146 @@ +Automatically generated README for this automation recipe: **get-dataset-imagenet-calibration** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-imagenet-calibration,30361fad3dff49ff) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-calibration)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,imagenet,calibration* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset imagenet calibration" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,imagenet,calibration` + +`cm run script --tags=get,dataset,imagenet,calibration[,variations] ` + +*or* + +`cmr "get dataset imagenet calibration"` + +`cmr "get dataset imagenet calibration [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,imagenet,calibration', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,imagenet,calibration"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset imagenet calibration[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**calibration-option**" +
+ Click here to expand this section. + + * **`_mlperf.option1`** (default) + - Environment variables: + - *CM_MLPERF_IMAGENET_CALIBRATION_OPTION*: `one` + - *CM_DOWNLOAD_CHECKSUM*: `f09719174af3553119e2c621157773a6` + - Workflow: + * `_mlperf.option2` + - Environment variables: + - *CM_MLPERF_IMAGENET_CALIBRATION_OPTION*: `two` + - *CM_DOWNLOAD_CHECKSUM*: `e44582af00e3b4fc3fac30efd6bdd05f` + - Workflow: + +
+ + +#### Default variations + +`_mlperf.option1` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-calibration/_cm.yaml)*** + * download,file + * CM names: `--adr.['calibration-file-downloader']...` + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-calibration/_cm.yaml) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-calibration/_cm.yaml) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-calibration/_cm.yaml) + +___ +### Script output +`cmr "get dataset imagenet calibration [,variations]" -j` +#### New environment keys (filter) + +* `CM_MLPERF_IMAGENET_CALIBRATION_LIST_FILE_WITH_PATH` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-datasets/get-dataset-imagenet-helper.md b/docs/AI-ML-datasets/get-dataset-imagenet-helper.md new file mode 100644 index 000000000..6ce0dc22e --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-imagenet-helper.md @@ -0,0 +1,120 @@ +Automatically generated README for this automation recipe: **get-dataset-imagenet-helper** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-imagenet-helper,a6c3c321d07742f9) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-helper)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,imagenet,helper,imagenet-helper* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get imagenet helper imagenet-helper" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,imagenet,helper,imagenet-helper` + +`cm run script --tags=get,imagenet,helper,imagenet-helper ` + +*or* + +`cmr "get imagenet helper imagenet-helper"` + +`cmr "get imagenet helper imagenet-helper " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,imagenet,helper,imagenet-helper', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,imagenet,helper,imagenet-helper"``` + +#### Run this script via Docker (beta) + +`cm docker script "get imagenet helper imagenet-helper" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-helper/_cm.json) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-helper/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-helper/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-helper/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-helper/_cm.json) + +___ +### Script output +`cmr "get imagenet helper imagenet-helper " -j` +#### New environment keys (filter) + +* `+PYTHONPATH` +* `CM_DATASET_IMAGENET_HELPER_PATH` +#### New environment keys auto-detected from customize + +* `CM_DATASET_IMAGENET_HELPER_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-imagenet-train.md b/docs/AI-ML-datasets/get-dataset-imagenet-train.md new file mode 100644 index 000000000..a6c7feb9f --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-imagenet-train.md @@ -0,0 +1,149 @@ +Automatically generated README for this automation recipe: **get-dataset-imagenet-train** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-imagenet-train,2bec165da5cc4ebf) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* 
GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-train)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,imagenet,train,dataset,original* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get imagenet train dataset original" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,imagenet,train,dataset,original` + +`cm run script --tags=get,imagenet,train,dataset,original [--input_flags]` + +*or* + +`cmr "get imagenet train dataset original"` + +`cmr "get imagenet train dataset original " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,imagenet,train,dataset,original', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,imagenet,train,dataset,original"``` + +#### Run this script via Docker (beta) + +`cm docker script "get imagenet train dataset original" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--input=value` → `IMAGENET_TRAIN_PATH=value` +* `--torrent=value` → `CM_DATASET_IMAGENET_TRAIN_TORRENT_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "input":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-train/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-train/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-train/_cm.json)*** + * download-and-extract,file,_extract + * Enable this dependency only if all ENV vars are set:
+`{'CM_DATASET_IMAGENET_VAL_REQUIRE_DAE': ['yes', 'True']}` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + * file,extract + * Enable this dependency only if all ENV vars are set:
+`{'CM_DAE_ONLY_EXTRACT': ['yes', 'True']}` + - CM script: [extract-file](https://github.com/mlcommons/cm4mlops/tree/master/script/extract-file) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-train/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-train/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-train/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-train/_cm.json) + +___ +### Script output +`cmr "get imagenet train dataset original " [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_IMAGENET_*` +* `CM_DATASET_PATH` +#### New environment keys auto-detected from customize + +* `CM_DATASET_IMAGENET_PATH` +* `CM_DATASET_IMAGENET_TRAIN_PATH` +* `CM_DATASET_IMAGENET_TRAIN_REQUIRE_DAE` +* `CM_DATASET_IMAGENET_VAL_REQUIRE_DAE` +* `CM_DATASET_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-imagenet-val.md b/docs/AI-ML-datasets/get-dataset-imagenet-val.md new file mode 100644 index 000000000..09c78b485 --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-imagenet-val.md @@ -0,0 +1,211 @@ +Automatically generated README for this automation recipe: **get-dataset-imagenet-val** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-imagenet-val,7afd58d287fe4f11) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-val)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,val,validation,dataset,imagenet,ILSVRC,image-classification,original* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get val validation dataset imagenet ILSVRC image-classification original" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,val,validation,dataset,imagenet,ILSVRC,image-classification,original` + +`cm run script --tags=get,val,validation,dataset,imagenet,ILSVRC,image-classification,original[,variations] [--input_flags]` + +*or* + +`cmr "get val validation dataset imagenet ILSVRC image-classification original"` + +`cmr "get val validation dataset imagenet ILSVRC image-classification original [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,val,validation,dataset,imagenet,ILSVRC,image-classification,original', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,val,validation,dataset,imagenet,ILSVRC,image-classification,original"``` + +#### Run this script via Docker (beta) + +`cm docker script "get val validation dataset imagenet ILSVRC image-classification original[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_2012-500` + - Workflow: + * `_2012-full` + - Workflow: + * `_run-during-docker-build` + - Workflow: + +
+ + + * Group "**count**" +
+ Click here to expand this section. + + * `_full` + - Environment variables: + - *CM_DATASET_SIZE*: `50000` + - *CM_IMAGENET_FULL*: `yes` + - *CM_DAE_FILENAME*: `ILSVRC2012_img_val.tar` + - *CM_DAE_DOWNLOADED_CHECKSUM*: `29b22e2961454d5413ddabcf34fc5622` + - Workflow: + * `_size.#` + - Environment variables: + - *CM_DATASET_SIZE*: `#` + - Workflow: + * **`_size.500`** (default) + - Environment variables: + - *CM_DATASET_SIZE*: `500` + - *CM_DAE_FILENAME*: `ILSVRC2012_img_val_500.tar` + - *CM_DAE_URL*: `http://cKnowledge.org/ai/data/ILSVRC2012_img_val_500.tar` + - Workflow: + +
+ + + * Group "**dataset-version**" +
+ Click here to expand this section. + + * **`_2012`** (default) + - Environment variables: + - *CM_DATASET_VER*: `2012` + - Workflow: + +
+ + +#### Default variations + +`_2012,_size.500` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--imagenet_path=value` → `IMAGENET_PATH=value` +* `--torrent=value` → `CM_DATASET_IMAGENET_VAL_TORRENT_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "imagenet_path":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-val/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-val/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-val/_cm.json)*** + * download-and-extract,file,_extract + * Enable this dependency only if all ENV vars are set:
+`{'CM_DATASET_IMAGENET_VAL_REQUIRE_DAE': ['yes', 'True']}` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + * file,extract,_no-remove-extracted + * Enable this dependency only if all ENV vars are set:
+`{'CM_DAE_ONLY_EXTRACT': ['yes', 'True']}` + - CM script: [extract-file](https://github.com/mlcommons/cm4mlops/tree/master/script/extract-file) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-val/run.bat) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-val/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-val/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-val/_cm.json) + +___ +### Script output +`cmr "get val validation dataset imagenet ILSVRC image-classification original [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_IMAGENET_PATH` +* `CM_DATASET_IMAGENET_VAL_PATH` +* `CM_DATASET_PATH` +* `CM_DATASET_SIZE` +* `CM_DATASET_VER` +#### New environment keys auto-detected from customize + +* `CM_DATASET_IMAGENET_PATH` +* `CM_DATASET_IMAGENET_VAL_PATH` +* `CM_DATASET_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-kits19.md b/docs/AI-ML-datasets/get-dataset-kits19.md new file mode 100644 index 000000000..53f222b56 --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-kits19.md @@ -0,0 +1,172 @@ +Automatically generated README for this automation recipe: **get-dataset-kits19** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-kits19,79992bb221024ac5) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub 
directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-kits19)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,medical-imaging,kits,original,kits19* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset medical-imaging kits original kits19" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,medical-imaging,kits,original,kits19` + +`cm run script --tags=get,dataset,medical-imaging,kits,original,kits19[,variations] ` + +*or* + +`cmr "get dataset medical-imaging kits original kits19"` + +`cmr "get dataset medical-imaging kits original kits19 [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,medical-imaging,kits,original,kits19', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,medical-imaging,kits,original,kits19"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset medical-imaging kits original kits19[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_calibration` + - Environment variables: + - *CM_DATASET_CALIBRATION*: `yes` + - Workflow: + * `_default` + - Environment variables: + - *CM_GIT_PATCH*: `no` + - Workflow: + * `_full-history` + - Environment variables: + - *CM_GIT_DEPTH*: `` + - Workflow: + * `_no-recurse-submodules` + - Environment variables: + - *CM_GIT_RECURSE_SUBMODULES*: `` + - Workflow: + * `_patch` + - Environment variables: + - *CM_GIT_PATCH*: `yes` + - Workflow: + * `_short-history` + - Environment variables: + - *CM_GIT_DEPTH*: `--depth 5` + - Workflow: + * `_validation` + - Environment variables: + - *CM_DATASET_VALIDATION*: `yes` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_CHECKOUT: `master` +* CM_GIT_DEPTH: `--depth 2` +* CM_GIT_PATCH: `no` +* CM_GIT_RECURSE_SUBMODULES: `` +* CM_GIT_URL: `https://github.com/neheller/kits19` + +
+ +#### Versions +Default version: `master` + +* `custom` +* `master` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-kits19/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-kits19/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-kits19/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-kits19/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-kits19/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-kits19/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-kits19/_cm.json) + +___ +### Script output +`cmr "get dataset medical-imaging kits original kits19 [,variations]" -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +#### New environment keys auto-detected from customize + +* `CM_DATASET_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-librispeech.md b/docs/AI-ML-datasets/get-dataset-librispeech.md new file mode 100644 index 000000000..170522f4c --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-librispeech.md @@ -0,0 +1,134 @@ +Automatically generated README for this automation recipe: **get-dataset-librispeech** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-librispeech,09f29df607e0415d) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-librispeech)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,speech,speech-recognition,librispeech,validation,audio,training,original* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset speech speech-recognition librispeech validation audio training original" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,speech,speech-recognition,librispeech,validation,audio,training,original` + +`cm run script --tags=get,dataset,speech,speech-recognition,librispeech,validation,audio,training,original ` + +*or* + +`cmr "get dataset speech speech-recognition librispeech validation audio training original"` + +`cmr "get dataset speech speech-recognition librispeech validation audio training original " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,speech,speech-recognition,librispeech,validation,audio,training,original', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,speech,speech-recognition,librispeech,validation,audio,training,original"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset speech speech-recognition librispeech validation audio training original" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `dev-clean` + +* `dev-clean` +* `dev-other` +* `test-clean` +* `test-other` +* `train-clean-100` +* `train-clean-360` +* `train-other-500` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-librispeech/_cm.json)*** + * get,sys-utils-cm + * CM names: `--adr.['sys-utils']...` + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-librispeech/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-librispeech/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-librispeech/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-librispeech/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-librispeech/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-librispeech/_cm.json) + +___ +### Script output +`cmr "get dataset speech speech-recognition librispeech validation audio training original " -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +#### New environment keys auto-detected from customize + +* `CM_DATASET_LIBRISPEECH_PATH` +* `CM_DATASET_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-openimages-annotations.md b/docs/AI-ML-datasets/get-dataset-openimages-annotations.md new file mode 100644 index 000000000..c7b470c4d --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-openimages-annotations.md @@ -0,0 +1,144 @@ +Automatically generated README for this automation recipe: **get-dataset-openimages-annotations** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-openimages-annotations,47e2158ed24c44e9) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-annotations)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,aux,dataset-aux,object-detection,openimages,annotations* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get aux dataset-aux object-detection openimages annotations" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,aux,dataset-aux,object-detection,openimages,annotations` + +`cm run script --tags=get,aux,dataset-aux,object-detection,openimages,annotations[,variations] ` + +*or* + +`cmr "get aux dataset-aux object-detection openimages annotations"` + +`cmr "get aux dataset-aux object-detection openimages annotations [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,aux,dataset-aux,object-detection,openimages,annotations', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,aux,dataset-aux,object-detection,openimages,annotations"``` + +#### Run this script via Docker (beta) + +`cm docker script "get aux dataset-aux object-detection openimages annotations[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**download-source**" +
+ Click here to expand this section. + + * **`_from.github`** (default) + - Environment variables: + - *CM_WGET_URL*: `https://github.com/mlcommons/inference/releases/download/v2.1/openimages-mlperf_annotations_2.1.json.zip` + - Workflow: + +
+ + +#### Default variations + +`_from.github` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-annotations/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-annotations/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-annotations/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-annotations/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-annotations/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-annotations/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-annotations/_cm.json) + +___ +### Script output +`cmr "get aux dataset-aux object-detection openimages annotations [,variations]" -j` +#### New environment keys (filter) + +* `CM_DATASET_ANNOTATIONS_*` +* `CM_DATASET_OPENIMAGES_ANNOTATIONS_*` +#### New environment keys auto-detected from customize + +* `CM_DATASET_ANNOTATIONS_DIR_PATH` +* `CM_DATASET_ANNOTATIONS_FILE_PATH` +* `CM_DATASET_OPENIMAGES_ANNOTATIONS_DIR_PATH` +* `CM_DATASET_OPENIMAGES_ANNOTATIONS_FILE_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-openimages-calibration.md b/docs/AI-ML-datasets/get-dataset-openimages-calibration.md new file mode 100644 index 000000000..969e9872d --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-openimages-calibration.md @@ -0,0 +1,178 @@ +Automatically generated README for this automation recipe: **get-dataset-openimages-calibration** + +Category: 
**AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-openimages-calibration,27228976bb084dd0) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-calibration)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,openimages,calibration* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset openimages calibration" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,openimages,calibration` + +`cm run script --tags=get,dataset,openimages,calibration[,variations] ` + +*or* + +`cmr "get dataset openimages calibration"` + +`cmr "get dataset openimages calibration [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,openimages,calibration', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,openimages,calibration"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset openimages calibration[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_filter` + - Environment variables: + - *CM_CALIBRATE_FILTER*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,openimages,dataset,original,_calibration + - CM script: [get-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages) + +
+ + + * Group "**calibration-option**" +
+ Click here to expand this section. + + * **`_mlperf.option1`** (default) + - Environment variables: + - *CM_MLPERF_OPENIMAGES_CALIBRATION_OPTION*: `one` + - *CM_DOWNLOAD_CHECKSUM1*: `f09719174af3553119e2c621157773a6` + - Workflow: + +
+ + + * Group "**filter-size**" +
+ Click here to expand this section. + + * `_filter-size.#` + - Environment variables: + - *CM_CALIBRATION_FILTER_SIZE*: `#` + - Workflow: + * `_filter-size.400` + - Environment variables: + - *CM_CALIBRATION_FILTER_SIZE*: `400` + - Workflow: + +
+ + +#### Default variations + +`_mlperf.option1` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-calibration/_cm.yaml)*** + * download,file + * CM names: `--adr.['calibration-file-downloader']...` + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-calibration/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-calibration/_cm.yaml) + 1. ***Run native script if exists*** + * [run-filter.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-calibration/run-filter.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-calibration/_cm.yaml) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-calibration/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-calibration/_cm.yaml) + +___ +### Script output +`cmr "get dataset openimages calibration [,variations]" -j` +#### New environment keys (filter) + +* `CM_MLPERF_OPENIMAGES_CALIBRATION_LIST_FILE_WITH_PATH` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_OPENIMAGES_CALIBRATION_LIST_FILE_WITH_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-openimages.md b/docs/AI-ML-datasets/get-dataset-openimages.md new file mode 100644 index 000000000..a5d30a4b0 --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-openimages.md @@ -0,0 +1,250 @@ +Automatically generated README for this automation recipe: **get-dataset-openimages** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-openimages,0a9d49b644cf4142) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,openimages,open-images,object-detection,original* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset openimages open-images object-detection original" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,openimages,open-images,object-detection,original` + +`cm run script --tags=get,dataset,openimages,open-images,object-detection,original[,variations] ` + +*or* + +`cmr "get dataset openimages open-images object-detection original"` + +`cmr "get dataset openimages open-images object-detection original [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,openimages,open-images,object-detection,original', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,openimages,open-images,object-detection,original"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset openimages open-images object-detection original[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_filter` + - Workflow: + * `_filter,calibration` + - Workflow: + * `_filter-size.#` + - Workflow: + * `_using-fiftyone` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_fiftyone + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,openssl,lib + - CM script: [get-openssl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-openssl) + +
+ + + * Group "**annotations**" +
+ Click here to expand this section. + + * `_custom-annotations` + - Environment variables: + - *CM_DATASET_OPENIMAGES_CUSTOM_ANNOTATIONS*: `yes` + - Workflow: + * **`_default-annotations`** (default) + - Environment variables: + - *CM_DATASET_OPENIMAGES_CUSTOM_ANNOTATIONS*: `no` + - Workflow: + +
+ + + * Group "**dataset-type**" +
+ Click here to expand this section. + + * `_calibration` + - Environment variables: + - *CM_DATASET_CALIBRATION*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,openimages,calibration + * CM names: `--adr.['openimages-calibration']...` + - CM script: [get-dataset-openimages-calibration](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages-calibration) + * **`_validation`** (default) + - Environment variables: + - *CM_DATASET_CALIBRATION*: `no` + - Workflow: + +
+ + + * Group "**size**" +
+ Click here to expand this section. + + * **`_50`** (default) + - Environment variables: + - *CM_DATASET_SIZE*: `50` + - Workflow: + * `_500` + - Environment variables: + - *CM_DATASET_SIZE*: `500` + - Workflow: + * `_full` + - Environment variables: + - *CM_DATASET_SIZE*: `` + - Workflow: + * `_size.#` + - Environment variables: + - *CM_DATASET_SIZE*: `#` + - Workflow: + +
+ + +#### Default variations + +`_50,_default-annotations,_validation` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DATASET_CALIBRATION: `no` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages/_cm.json)*** + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_requests + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * mlperf,inference,source + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,generic-python-lib,_boto3 + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_tqdm + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_opencv-python + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pandas + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pycocotools + * CM names: `--adr.['pycocotools']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages/customize.py)*** + 1. 
Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages/run.sh) + 1. ***Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages/_cm.json)*** + * get,openimages,annotations + * Enable this dependency only if all ENV vars are set:
+`{'CM_DATASET_OPENIMAGES_CUSTOM_ANNOTATIONS': ['yes']}` + - CM script: [get-dataset-openimages-annotations](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages-annotations) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages/_cm.json) + +___ +### Script output +`cmr "get dataset openimages open-images object-detection original [,variations]" -j` +#### New environment keys (filter) + +* `CM_CALIBRATION_DATASET_PATH` +* `CM_DATASET_ANNOTATIONS_DIR_PATH` +* `CM_DATASET_ANNOTATIONS_FILE_PATH` +* `CM_DATASET_CALIBRATION_ANNOTATIONS_FILE_PATH` +* `CM_DATASET_PATH` +* `CM_DATASET_PATH_ROOT` +* `CM_DATASET_VALIDATION_ANNOTATIONS_FILE_PATH` +#### New environment keys auto-detected from customize + +* `CM_CALIBRATION_DATASET_PATH` +* `CM_DATASET_ANNOTATIONS_DIR_PATH` +* `CM_DATASET_ANNOTATIONS_FILE_PATH` +* `CM_DATASET_CALIBRATION_ANNOTATIONS_FILE_PATH` +* `CM_DATASET_PATH` +* `CM_DATASET_PATH_ROOT` +* `CM_DATASET_VALIDATION_ANNOTATIONS_FILE_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-openorca.md b/docs/AI-ML-datasets/get-dataset-openorca.md new file mode 100644 index 000000000..982a9c9c6 --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-openorca.md @@ -0,0 +1,173 @@ +Automatically generated README for this automation recipe: **get-dataset-openorca** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-openorca,9252c4d90d5940b7) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openorca)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,openorca,language-processing,original* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset openorca language-processing original" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,openorca,language-processing,original` + +`cm run script --tags=get,dataset,openorca,language-processing,original[,variations] ` + +*or* + +`cmr "get dataset openorca language-processing original"` + +`cmr "get dataset openorca language-processing original [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,openorca,language-processing,original', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,openorca,language-processing,original"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset openorca language-processing original[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**dataset-type**" +
+ Click here to expand this section. + + * `_calibration` + - Environment variables: + - *CM_DATASET_CALIBRATION*: `yes` + - Workflow: + * **`_validation`** (default) + - Environment variables: + - *CM_DATASET_CALIBRATION*: `no` + - Workflow: + +
+ + + * Group "**size**" +
+ Click here to expand this section. + + * `_500` + - Environment variables: + - *CM_DATASET_SIZE*: `500` + - Workflow: + * **`_60`** (default) + - Environment variables: + - *CM_DATASET_SIZE*: `60` + - Workflow: + * `_full` + - Environment variables: + - *CM_DATASET_SIZE*: `24576` + - Workflow: + * `_size.#` + - Environment variables: + - *CM_DATASET_SIZE*: `#` + - Workflow: + +
+ + +#### Default variations + +`_60,_validation` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DATASET_CALIBRATION: `no` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openorca/_cm.json)*** + * get,git,repo,_lfs,_repo.https://huggingface.co/datasets/Open-Orca/OpenOrca + * CM names: `--adr.['openorca-src']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openorca/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openorca/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openorca/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openorca/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openorca/_cm.json) + +___ +### Script output +`cmr "get dataset openorca language-processing original [,variations]" -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +#### New environment keys auto-detected from customize + +* `CM_DATASET_OPENORCA_PARQUET` +* `CM_DATASET_PATH` +* `CM_DATASET_PATH_ROOT` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-squad-vocab.md b/docs/AI-ML-datasets/get-dataset-squad-vocab.md new file mode 100644 index 000000000..1152f2292 --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-squad-vocab.md @@ -0,0 +1,142 @@ +Automatically generated README for this automation recipe: **get-dataset-squad-vocab** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-squad-vocab,e38874fff5094577) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad-vocab)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,aux,dataset-aux,language-processing,squad-aux,vocab,squad-vocab* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get aux dataset-aux language-processing squad-aux vocab squad-vocab" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,aux,dataset-aux,language-processing,squad-aux,vocab,squad-vocab` + +`cm run script --tags=get,aux,dataset-aux,language-processing,squad-aux,vocab,squad-vocab[,variations] ` + +*or* + +`cmr "get aux dataset-aux language-processing squad-aux vocab squad-vocab"` + +`cmr "get aux dataset-aux language-processing squad-aux vocab squad-vocab [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,aux,dataset-aux,language-processing,squad-aux,vocab,squad-vocab', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,aux,dataset-aux,language-processing,squad-aux,vocab,squad-vocab"``` + +#### Run this script via Docker (beta) + +`cm docker script "get aux dataset-aux language-processing squad-aux vocab squad-vocab[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**download-source**" +
+ Click here to expand this section. + + * **`_from.zenodo`** (default) + - Environment variables: + - *CM_WGET_URL*: `https://zenodo.org/record/3733868/files/vocab.txt` + - Workflow: + +
+ + +#### Default variations + +`_from.zenodo` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad-vocab/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad-vocab/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad-vocab/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad-vocab/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad-vocab/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad-vocab/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad-vocab/_cm.json) + +___ +### Script output +`cmr "get aux dataset-aux language-processing squad-aux vocab squad-vocab [,variations]" -j` +#### New environment keys (filter) + +* `CM_DATASET_SQUAD_VOCAB_PATH` +* `CM_ML_MODEL_BERT_VOCAB_FILE_WITH_PATH` +#### New environment keys auto-detected from customize + +* `CM_DATASET_SQUAD_VOCAB_PATH` +* `CM_ML_MODEL_BERT_VOCAB_FILE_WITH_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-squad.md b/docs/AI-ML-datasets/get-dataset-squad.md new file mode 100644 index 000000000..a7f1a5595 --- /dev/null +++ b/docs/AI-ML-datasets/get-dataset-squad.md @@ -0,0 +1,129 @@ +Automatically generated README for this automation recipe: **get-dataset-squad** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run 
this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-squad,6651c119c3ae49b3) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,squad,language-processing,validation,original* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset squad language-processing validation original" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,squad,language-processing,validation,original` + +`cm run script --tags=get,dataset,squad,language-processing,validation,original ` + +*or* + +`cmr "get dataset squad language-processing validation original"` + +`cmr "get dataset squad language-processing validation original " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,squad,language-processing,validation,original', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,squad,language-processing,validation,original"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset squad language-processing validation original" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `1.1` + +* `1.1` +* `2.0` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad/_cm.json)*** + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad/_cm.json) + +___ +### Script output +`cmr "get dataset squad language-processing validation original " -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +#### New environment keys auto-detected from customize + +* `CM_DATASET_PATH` +* `CM_DATASET_SQUAD_PATH` +* `CM_DATASET_SQUAD_VAL_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-criteo.md b/docs/AI-ML-datasets/get-preprocessed-dataset-criteo.md new file mode 100644 index 000000000..fec163969 --- /dev/null +++ b/docs/AI-ML-datasets/get-preprocessed-dataset-criteo.md @@ -0,0 +1,226 @@ +Automatically generated README for this automation recipe: **get-preprocessed-dataset-criteo** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-preprocessed-dataset-criteo,afa59956272a4ba4) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-criteo)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,criteo,recommendation,dlrm,preprocessed* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset criteo recommendation dlrm preprocessed" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,criteo,recommendation,dlrm,preprocessed` + +`cm run script --tags=get,dataset,criteo,recommendation,dlrm,preprocessed[,variations] [--input_flags]` + +*or* + +`cmr "get dataset criteo recommendation dlrm preprocessed"` + +`cmr "get dataset criteo recommendation dlrm preprocessed [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,criteo,recommendation,dlrm,preprocessed', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,criteo,recommendation,dlrm,preprocessed"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset criteo recommendation dlrm preprocessed[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_1` + - Environment variables: + - *CM_DATASET_SIZE*: `1` + - Workflow: + * `_50` + - Environment variables: + - *CM_DATASET_SIZE*: `50` + - Workflow: + * `_fake` + - Environment variables: + - *CM_CRITEO_FAKE*: `yes` + - Workflow: + * `_full` + - Workflow: + * `_validation` + - Workflow: + +
+ + + * Group "**type**" +
+ Click here to expand this section. + + * **`_multihot`** (default) + - Environment variables: + - *CM_DATASET_CRITEO_MULTIHOT*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,mlperf,training,src + * CM names: `--adr.['mlperf-training', 'training-src']...` + - CM script: [get-mlperf-training-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-src) + * get,generic-python-lib,_package.typing_inspect + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.iopath + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.fbgemm_gpu + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.torchrec + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.pyre_extensions + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + +#### Default variations + +`_multihot` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--dir=value` → `CM_DATASET_PREPROCESSED_PATH=value` +* `--output_dir=value` → `CM_DATASET_PREPROCESSED_OUTPUT_PATH=value` +* `--threads=value` → `CM_NUM_PREPROCESS_THREADS=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "dir":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-criteo/_cm.json)*** + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,dataset,criteo,original + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_DATASET_PREPROCESSED_PATH': ['on']}` + * CM names: `--adr.['original-dataset', 'criteo-dataset']...` + - CM script: [get-dataset-criteo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-criteo) + * get,dlrm,src + * CM names: `--adr.['dlrm-src']...` + - CM script: [get-dlrm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dlrm) + * mlperf,mlcommons,inference,source,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,generic-python-lib,_scikit-learn + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_opencv-python + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_decorator + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_psutil + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnx + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_tqdm + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_mlperf_logging + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. 
***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-criteo/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-criteo/_cm.json) + 1. ***Run native script if exists*** + * [run-multihot.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-criteo/run-multihot.sh) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-criteo/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-criteo/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-criteo/_cm.json) + +___ +### Script output +`cmr "get dataset criteo recommendation dlrm preprocessed [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +#### New environment keys auto-detected from customize + +* `CM_DATASET_PREPROCESSED_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-generic.md b/docs/AI-ML-datasets/get-preprocessed-dataset-generic.md new file mode 100644 index 000000000..f6ecaad04 --- /dev/null +++ b/docs/AI-ML-datasets/get-preprocessed-dataset-generic.md @@ -0,0 +1,117 @@ +Automatically generated README for this automation recipe: **get-preprocesser-script-generic** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-preprocesser-script-generic,d5e603627e2046eb) ]* + +--- +#### Summary + +* CM GitHub 
repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocesser-script-generic)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,preprocessor,generic,image-preprocessor,script* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get preprocessor generic image-preprocessor script" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,preprocessor,generic,image-preprocessor,script` + +`cm run script --tags=get,preprocessor,generic,image-preprocessor,script ` + +*or* + +`cmr "get preprocessor generic image-preprocessor script"` + +`cmr "get preprocessor generic image-preprocessor script " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,preprocessor,generic,image-preprocessor,script', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,preprocessor,generic,image-preprocessor,script"``` + +#### Run this script via Docker (beta) + +`cm docker script "get preprocessor generic image-preprocessor script" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocesser-script-generic/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocesser-script-generic/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocesser-script-generic/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocesser-script-generic/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocesser-script-generic/_cm.json) + +___ +### Script output +`cmr "get preprocessor generic image-preprocessor script " -j` +#### New environment keys (filter) + +* `+PYTHONPATH` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-imagenet.md b/docs/AI-ML-datasets/get-preprocessed-dataset-imagenet.md new file mode 100644 index 000000000..6c557299a --- /dev/null +++ b/docs/AI-ML-datasets/get-preprocessed-dataset-imagenet.md @@ -0,0 +1,456 @@ +Automatically generated README for this automation recipe: **get-preprocessed-dataset-imagenet** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-preprocessed-dataset-imagenet,f259d490bbaf45f5) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,imagenet,ILSVRC,image-classification,preprocessed* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset imagenet ILSVRC image-classification preprocessed" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,imagenet,ILSVRC,image-classification,preprocessed` + +`cm run script --tags=get,dataset,imagenet,ILSVRC,image-classification,preprocessed[,variations] [--input_flags]` + +*or* + +`cmr "get dataset imagenet ILSVRC image-classification preprocessed"` + +`cmr "get dataset imagenet ILSVRC image-classification preprocessed [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,imagenet,ILSVRC,image-classification,preprocessed', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,imagenet,ILSVRC,image-classification,preprocessed"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset imagenet ILSVRC image-classification preprocessed[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *Internal group (variations should not be selected manually)* +
+ Click here to expand this section. + + * `_mobilenet_` + - Environment variables: + - *CM_MODEL*: `mobilenet` + - Workflow: + * `_resnet50_` + - Environment variables: + - *CM_MODEL*: `resnet50` + - Workflow: + +
+ + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_500,validation` + - Workflow: + * `_default` + - Workflow: + * `_for.mobilenet,float32` + - Environment variables: + - *CM_DATASET_QUANTIZE*: `0` + - *CM_DATASET_GIVEN_CHANNEL_MEANS*: `` + - *CM_DATASET_NORMALIZE_DATA*: `1` + - *CM_DATASET_SUBTRACT_MEANS*: `0` + - Workflow: + * `_for.mobilenet,rgb8` + - Environment variables: + - *CM_DATASET_GIVEN_CHANNEL_MEANS*: `` + - *CM_DATASET_SUBTRACT_MEANS*: `0` + - *CM_DATASET_QUANTIZE*: `0` + - *CM_DATASET_NORMALIZE_DATA*: `0` + - *CM_DATASET_DATA_TYPE*: `uint8` + - Workflow: + * `_for.resnet50,float32` + - Workflow: + * `_for.resnet50,rgb8` + - Environment variables: + - *CM_DATASET_GIVEN_CHANNEL_MEANS*: `` + - *CM_DATASET_SUBTRACT_MEANS*: `0` + - *CM_DATASET_NORMALIZE_DATA*: `0` + - *CM_DATASET_QUANTIZE*: `0` + - *CM_DATASET_DATA_TYPE*: `uint8` + - Workflow: + * `_for.resnet50,rgb8,uint8` + - Environment variables: + - *CM_DATASET_GIVEN_CHANNEL_MEANS*: `123.68 116.78 103.94` + - *CM_DATASET_SUBTRACT_MEANS*: `1` + - *CM_DATASET_QUANTIZE*: `1` + - Workflow: + * `_for.resnet50,uint8` + - Environment variables: + - *CM_DATASET_QUANT_SCALE*: `1.18944883` + - *CM_DATASET_QUANT_OFFSET*: `0` + - Workflow: + * `_pytorch` + - Environment variables: + - *CM_PREPROCESS_PYTORCH*: `yes` + - *CM_MODEL*: `resnet50` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_torchvision + * CM names: `--adr.['torchvision']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_tflite_tpu` + - Environment variables: + - *CM_MODEL*: `resnet50` + - *CM_PREPROCESS_TFLITE_TPU*: `yes` + - Workflow: + +
+ + + * Group "**calibration-option**" +
+ Click here to expand this section. + + * `_mlperf.option1` + - Environment variables: + - *CM_DATASET_CALIBRATION_OPTION*: `one` + - Workflow: + * `_mlperf.option2` + - Environment variables: + - *CM_DATASET_CALIBRATION_OPTION*: `two` + - Workflow: + +
+ + + * Group "**dataset-type**" +
+ Click here to expand this section. + + * `_calibration` + - Environment variables: + - *CM_DATASET_TYPE*: `calibration` + - Workflow: + * **`_validation`** (default) + - Environment variables: + - *CM_DATASET_TYPE*: `validation` + - Workflow: + +
+ + + * Group "**extension**" +
+ Click here to expand this section. + + * `_rgb32` + - Environment variables: + - *CM_DATASET_PREPROCESSED_EXTENSION*: `rgb32` + - Workflow: + * `_rgb8` + - Environment variables: + - *CM_DATASET_PREPROCESSED_EXTENSION*: `rgb8` + - Workflow: + +
+ + + * Group "**interpolation-method**" +
+ Click here to expand this section. + + * `_inter.area` + - Environment variables: + - *CM_DATASET_INTERPOLATION_METHOD*: `INTER_AREA` + - Workflow: + * `_inter.linear` + - Environment variables: + - *CM_DATASET_INTERPOLATION_METHOD*: `INTER_LINEAR` + - Workflow: + +
+ + + * Group "**layout**" +
+ Click here to expand this section. + + * **`_NCHW`** (default) + - Environment variables: + - *CM_DATASET_DATA_LAYOUT*: `NCHW` + - Workflow: + * `_NHWC` + - Environment variables: + - *CM_DATASET_DATA_LAYOUT*: `NHWC` + - Workflow: + +
+ + + * Group "**model**" +
+ Click here to expand this section. + + * `_for.mobilenet` + - Workflow: + * `_for.resnet50` + - Environment variables: + - *CM_DATASET_SUBTRACT_MEANS*: `1` + - *CM_DATASET_GIVEN_CHANNEL_MEANS*: `123.68 116.78 103.94` + - *CM_DATASET_NORMALIZE_DATA*: `0` + - *CM_DATASET_INTERPOLATION_METHOD*: `INTER_AREA` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * `_float32` + - Environment variables: + - *CM_DATASET_DATA_TYPE*: `float32` + - *CM_DATASET_QUANTIZE*: `0` + - *CM_DATASET_CONVERT_TO_UNSIGNED*: `0` + - Workflow: + * `_int8` + - Environment variables: + - *CM_DATASET_DATA_TYPE*: `int8` + - *CM_DATASET_QUANTIZE*: `1` + - *CM_DATASET_CONVERT_TO_UNSIGNED*: `0` + - Workflow: + * `_uint8` + - Environment variables: + - *CM_DATASET_DATA_TYPE*: `uint8` + - *CM_DATASET_DATA_TYPE_INPUT*: `float32` + - *CM_DATASET_QUANTIZE*: `1` + - *CM_DATASET_CONVERT_TO_UNSIGNED*: `1` + - Workflow: + +
+ + + * Group "**preprocessing-source**" +
+ Click here to expand this section. + + * `_generic-preprocessor` + - Environment variables: + - *CM_DATASET_REFERENCE_PREPROCESSOR*: `0` + - Workflow: + 1. ***Read "prehook_deps" on other CM scripts*** + * get,generic,image-preprocessor + - CM script: [get-preprocesser-script-generic](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocesser-script-generic) + * **`_mlcommons-reference-preprocessor`** (default) + - Environment variables: + - *CM_DATASET_REFERENCE_PREPROCESSOR*: `1` + - Workflow: + +
+ + + * Group "**resolution**" +
+ Click here to expand this section. + + * `_resolution.#` + - Environment variables: + - *CM_DATASET_INPUT_SQUARE_SIDE*: `#` + - Workflow: + * **`_resolution.224`** (default) + - Environment variables: + - *CM_DATASET_INPUT_SQUARE_SIDE*: `224` + - Workflow: + +
+ + + * Group "**size**" +
+ Click here to expand this section. + + * `_1` + - Environment variables: + - *CM_DATASET_SIZE*: `1` + - Workflow: + * `_500` + - Environment variables: + - *CM_DATASET_SIZE*: `500` + - Workflow: + * `_full` + - Environment variables: + - *CM_DATASET_SIZE*: `50000` + - Workflow: + * `_size.#` + - Environment variables: + - *CM_DATASET_SIZE*: `#` + - Workflow: + +
+ + +#### Default variations + +`_NCHW,_mlcommons-reference-preprocessor,_resolution.224,_validation` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--dir=value` → `CM_DATASET_PREPROCESSED_PATH=value` +* `--imagenet_path=value` → `CM_IMAGENET_PATH=value` +* `--imagenet_preprocessed_path=value` → `CM_IMAGENET_PREPROCESSED_PATH=value` +* `--threads=value` → `CM_NUM_PREPROCESS_THREADS=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "dir":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DATASET_CROP_FACTOR: `87.5` +* CM_DATASET_DATA_TYPE: `float32` +* CM_DATASET_DATA_LAYOUT: `NCHW` +* CM_DATASET_QUANT_SCALE: `1` +* CM_DATASET_QUANTIZE: `0` +* CM_DATASET_QUANT_OFFSET: `0` +* CM_DATASET_PREPROCESSED_EXTENSION: `npy` +* CM_DATASET_CONVERT_TO_UNSIGNED: `0` +* CM_DATASET_REFERENCE_PREPROCESSOR: `1` +* CM_PREPROCESS_VGG: `yes` +* CM_MODEL: `resnet50` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet/_cm.json)*** + * get,python3 + * Skip this dependency only if all ENV vars are set:
+`{'CM_IMAGENET_PREPROCESSED_PATH': ['on']}` + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,dataset,image-classification,original + * Skip this dependency only if all ENV vars are set:
+`{'CM_IMAGENET_PREPROCESSED_PATH': ['on']}` + * CM names: `--adr.['original-dataset']...` + - CM script: [get-dataset-imagenet-val](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-val) + * get,dataset-aux,image-classification,imagenet-aux + * Enable this dependency only if all ENV vars are set:
+`{'CM_DATASET_TYPE': ['validation']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_IMAGENET_PREPROCESSED_PATH': ['on']}` + - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) + * get,dataset,imagenet,calibration + * Enable this dependency only if all ENV vars are set:
+`{'CM_DATASET_TYPE': ['calibration']}` + - CM script: [get-dataset-imagenet-calibration](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-calibration) + * get,generic-python-lib,_package.opencv-python-headless + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pillow + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * mlperf,mlcommons,inference,source,src + * Enable this dependency only if all ENV vars are set:
+`{'CM_DATASET_REFERENCE_PREPROCESSOR': ['1']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_IMAGENET_PREPROCESSED_PATH': ['on']}` + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet/_cm.json) + +___ +### Script output +`cmr "get dataset imagenet ILSVRC image-classification preprocessed [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +#### New environment keys auto-detected from customize + +* `CM_DATASET_DATA_TYPE_INPUT` +* `CM_DATASET_IMAGES_LIST` +* `CM_DATASET_PREPROCESSED_IMAGENAMES_LIST` +* `CM_DATASET_PREPROCESSED_IMAGES_LIST` +* `CM_DATASET_PREPROCESSED_PATH` +* `CM_DATASET_SIZE` +* `CM_DATASET_TYPE` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-kits19.md b/docs/AI-ML-datasets/get-preprocessed-dataset-kits19.md new file mode 100644 index 000000000..35e4a05b2 --- /dev/null +++ b/docs/AI-ML-datasets/get-preprocessed-dataset-kits19.md @@ -0,0 +1,232 @@ +Automatically generated README for this automation recipe: **get-preprocessed-dataset-kits19** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-preprocessed-dataset-kits19,2094d9b9ab6c4c9e) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-kits19)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,medical-imaging,kits19,preprocessed* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset medical-imaging kits19 preprocessed" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,medical-imaging,kits19,preprocessed` + +`cm run script --tags=get,dataset,medical-imaging,kits19,preprocessed[,variations] [--input_flags]` + +*or* + +`cmr "get dataset medical-imaging kits19 preprocessed"` + +`cmr "get dataset medical-imaging kits19 preprocessed [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,medical-imaging,kits19,preprocessed', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,medical-imaging,kits19,preprocessed"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset medical-imaging kits19 preprocessed[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_nvidia` + - Environment variables: + - *CM_PREPROCESSING_BY_NVIDIA*: `yes` + - Workflow: + +
+ + + * Group "**dataset-count**" +
+ Click here to expand this section. + + * `_1` + - Environment variables: + - *CM_DATASET_SIZE*: `1` + - Workflow: + * `_5` + - Environment variables: + - *CM_DATASET_SIZE*: `5` + - Workflow: + * `_50` + - Environment variables: + - *CM_DATASET_SIZE*: `50` + - Workflow: + * `_500` + - Environment variables: + - *CM_DATASET_SIZE*: `500` + - Workflow: + * `_full` + - Environment variables: + - *CM_DATASET_SIZE*: `` + - Workflow: + +
+ + + * Group "**dataset-precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_DATASET_DTYPE*: `fp32` + - Workflow: + * `_int8` + - Environment variables: + - *CM_DATASET_DTYPE*: `int8` + - Workflow: + +
+ + + * Group "**dataset-type**" +
+ Click here to expand this section. + + * `_calibration` + - Environment variables: + - *CM_DATASET_PATH*: `<<>>` + - Workflow: + * **`_validation`** (default) + - Workflow: + +
+ + +#### Default variations + +`_fp32,_validation` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--dir=value` → `CM_DATASET_PREPROCESSED_PATH=value` +* `--threads=value` → `CM_NUM_PREPROCESS_THREADS=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "dir":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DATASET: `kits19` +* CM_DATASET_DTYPE: `fp32` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-kits19/_cm.json)*** + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,dataset,medical-imaging,kits19,original + * CM names: `--adr.['original-dataset']...` + - CM script: [get-dataset-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-kits19) + * mlperf,mlcommons,inference,source,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,generic-python-lib,_scipy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_nibabel + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_numpy + * CM names: `--adr.['numpy']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-kits19/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-kits19/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-kits19/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-kits19/_cm.json) + 1. 
***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-kits19/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-kits19/_cm.json) + +___ +### Script output +`cmr "get dataset medical-imaging kits19 preprocessed [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +#### New environment keys auto-detected from customize + +* `CM_DATASET_PREPROCESSED_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-librispeech.md b/docs/AI-ML-datasets/get-preprocessed-dataset-librispeech.md new file mode 100644 index 000000000..875bcf494 --- /dev/null +++ b/docs/AI-ML-datasets/get-preprocessed-dataset-librispeech.md @@ -0,0 +1,222 @@ +Automatically generated README for this automation recipe: **get-preprocessed-dataset-librispeech** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-preprocessed-dataset-librispeech,e9f62fc969d5483a) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-librispeech)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,speech-recognition,librispeech,preprocessed* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset speech-recognition librispeech preprocessed" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,speech-recognition,librispeech,preprocessed` + +`cm run script --tags=get,dataset,speech-recognition,librispeech,preprocessed[,variations] [--input_flags]` + +*or* + +`cmr "get dataset speech-recognition librispeech preprocessed"` + +`cmr "get dataset speech-recognition librispeech preprocessed [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,speech-recognition,librispeech,preprocessed', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,speech-recognition,librispeech,preprocessed"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset speech-recognition librispeech preprocessed[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**dataset-count**" +
+ Click here to expand this section. + + * `_1` + - Environment variables: + - *CM_DATASET_SIZE*: `1` + - Workflow: + * `_5` + - Environment variables: + - *CM_DATASET_SIZE*: `5` + - Workflow: + * `_50` + - Environment variables: + - *CM_DATASET_SIZE*: `50` + - Workflow: + * `_500` + - Environment variables: + - *CM_DATASET_SIZE*: `500` + - Workflow: + * `_full` + - Environment variables: + - *CM_DATASET_SIZE*: `` + - Workflow: + +
+ + + * Group "**dataset-precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_DATASET_DTYPE*: `fp32` + - Workflow: + * `_int8` + - Environment variables: + - *CM_DATASET_DTYPE*: `int8` + - Workflow: + +
+ + + * Group "**dataset-type**" +
+ Click here to expand this section. + + * `_calibration` + - Environment variables: + - *CM_DATASET_PATH*: `<<>>` + - Workflow: + * **`_validation`** (default) + - Workflow: + +
+ + +#### Default variations + +`_fp32,_validation` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--dir=value` → `CM_DATASET_PREPROCESSED_PATH=value` +* `--threads=value` → `CM_NUM_PREPROCESS_THREADS=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "dir":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DATASET: `kits19` +* CM_DATASET_DTYPE: `fp32` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-librispeech/_cm.json)*** + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,dataset,speech-recognition,librispeech,original + * CM names: `--adr.['original-dataset']...` + - CM script: [get-dataset-librispeech](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-librispeech) + * mlperf,mlcommons,inference,source,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,generic-python-lib,_sox + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pandas + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_tqdm + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,sys-util,generic,_sox + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-librispeech/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-librispeech/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-librispeech/run.sh) + 1. 
Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-librispeech/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-librispeech/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-librispeech/_cm.json) + +___ +### Script output +`cmr "get dataset speech-recognition librispeech preprocessed [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +#### New environment keys auto-detected from customize + +* `CM_DATASET_PREPROCESSED_JSON` +* `CM_DATASET_PREPROCESSED_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-openimages.md b/docs/AI-ML-datasets/get-preprocessed-dataset-openimages.md new file mode 100644 index 000000000..84ee7e534 --- /dev/null +++ b/docs/AI-ML-datasets/get-preprocessed-dataset-openimages.md @@ -0,0 +1,401 @@ +Automatically generated README for this automation recipe: **get-preprocessed-dataset-openimages** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-preprocessed-dataset-openimages,9842f1be8cba4c7b) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see 
in above meta description): *get,dataset,openimages,open-images,object-detection,preprocessed* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset openimages open-images object-detection preprocessed" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,openimages,open-images,object-detection,preprocessed` + +`cm run script --tags=get,dataset,openimages,open-images,object-detection,preprocessed[,variations] [--input_flags]` + +*or* + +`cmr "get dataset openimages open-images object-detection preprocessed"` + +`cmr "get dataset openimages open-images object-detection preprocessed [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,dataset,openimages,open-images,object-detection,preprocessed', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,openimages,open-images,object-detection,preprocessed"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset openimages open-images object-detection preprocessed[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_filter` + - Workflow: + * `_filter,calibration` + - Environment variables: + - *CM_DATASET_CALIBRATION_FILTER*: `yes` + - Workflow: + * `_for.retinanet.onnx` + - Environment variables: + - *CM_ML_MODEL_NAME*: `retinanet` + - *CM_DATASET_SUBTRACT_MEANS*: `1` + - *CM_DATASET_GIVEN_CHANNEL_MEANS*: `0.485 0.456 0.406` + - *CM_DATASET_GIVEN_CHANNEL_STDS*: `0.229 0.224 0.225` + - *CM_DATASET_NORMALIZE_DATA*: `0` + - *CM_DATASET_NORMALIZE_LOWER*: `0.0` + - *CM_DATASET_NORMALIZE_UPPER*: `1.0` + - *CM_DATASET_CONVERT_TO_BGR*: `0` + - *CM_DATASET_CROP_FACTOR*: `100.0` + - Workflow: + * `_for.retinanet.onnx,fp32` + - Workflow: + * `_for.retinanet.onnx,uint8` + - Environment variables: + - *CM_DATASET_QUANT_SCALE*: `0.0186584499` + - *CM_DATASET_QUANT_OFFSET*: `114` + - Workflow: + * `_full,validation` + - Environment variables: + - *CM_DATASET_SIZE*: `24781` + - Workflow: + * `_nvidia` + - Environment variables: + - *CM_PREPROCESSING_BY_NVIDIA*: `yes` + - Workflow: + * `_quant-offset.#` + - Workflow: + * `_quant-scale.#` + - Workflow: + +
+ + + * Group "**annotations**" +
+ Click here to expand this section. + + * `_custom-annotations` + - Workflow: + * **`_default-annotations`** (default) + - Workflow: + +
+ + + * Group "**dataset-count**" +
+ Click here to expand this section. + + * **`_50`** (default) + - Environment variables: + - *CM_DATASET_SIZE*: `50` + - Workflow: + * `_500` + - Environment variables: + - *CM_DATASET_SIZE*: `500` + - Workflow: + * `_full` + - Workflow: + * `_size.#` + - Environment variables: + - *CM_DATASET_SIZE*: `#` + - Workflow: + +
+ + + * Group "**dataset-layout**" +
+ Click here to expand this section. + + * **`_NCHW`** (default) + - Environment variables: + - *CM_DATASET_DATA_LAYOUT*: `NCHW` + - Workflow: + * `_NHWC` + - Environment variables: + - *CM_DATASET_DATA_LAYOUT*: `NHWC` + - Workflow: + +
+ + + * Group "**dataset-precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_DATASET_DTYPE*: `fp32` + - *CM_DATASET_INPUT_DTYPE*: `fp32` + - *CM_DATASET_QUANTIZE*: `0` + - *CM_DATASET_CONVERT_TO_UNSIGNED*: `0` + - Workflow: + * `_int8` + - Environment variables: + - *CM_DATASET_DTYPE*: `int8` + - *CM_DATASET_INPUT_DTYPE*: `fp32` + - *CM_DATASET_QUANTIZE*: `1` + - *CM_DATASET_CONVERT_TO_UNSIGNED*: `0` + - Workflow: + * `_uint8` + - Environment variables: + - *CM_DATASET_DTYPE*: `uint8` + - *CM_DATASET_INPUT_DTYPE*: `fp32` + - *CM_DATASET_QUANTIZE*: `1` + - *CM_DATASET_CONVERT_TO_UNSIGNED*: `1` + - Workflow: + +
+ + + * Group "**dataset-type**" +
+ Click here to expand this section. + + * `_calibration` + - Environment variables: + - *CM_DATASET_PATH*: `<<>>` + - *CM_DATASET_ANNOTATIONS_FILE_PATH*: `<<>>` + - *CM_DATASET_TYPE*: `calibration` + - Workflow: + * **`_validation`** (default) + - Environment variables: + - *CM_DATASET_TYPE*: `validation` + - Workflow: + +
+ + + * Group "**extension**" +
+ Click here to expand this section. + + * `_npy` + - Environment variables: + - *CM_DATASET_PREPROCESSED_EXTENSION*: `npy` + - Workflow: + * `_raw` + - Environment variables: + - *CM_DATASET_PREPROCESSED_EXTENSION*: `raw` + - Workflow: + * `_rgb32` + - Environment variables: + - *CM_DATASET_PREPROCESSED_EXTENSION*: `rgb32` + - Workflow: + * `_rgb8` + - Environment variables: + - *CM_DATASET_PREPROCESSED_EXTENSION*: `rgb8` + - Workflow: + +
+ + + * Group "**filter-size**" +
+ Click here to expand this section. + + * `_filter-size.#` + - Workflow: + +
+ + + * Group "**interpolation-method**" +
+ Click here to expand this section. + + * `_inter.area` + - Environment variables: + - *CM_DATASET_INTERPOLATION_METHOD*: `INTER_AREA` + - Workflow: + * `_inter.linear` + - Environment variables: + - *CM_DATASET_INTERPOLATION_METHOD*: `INTER_LINEAR` + - Workflow: + +
+ + + * Group "**preprocessing-source**" +
+ Click here to expand this section. + + * `_generic-preprocessor` + - Environment variables: + - *CM_DATASET_REFERENCE_PREPROCESSOR*: `0` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_torch + * CM names: `--adr.['torch', 'pytorch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchvision + * CM names: `--adr.['torchvision']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Read "prehook_deps" on other CM scripts*** + * get,generic,image-preprocessor + - CM script: [get-preprocesser-script-generic](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocesser-script-generic) + * **`_mlcommons-reference-preprocessor`** (default) + - Environment variables: + - *CM_DATASET_REFERENCE_PREPROCESSOR*: `1` + - Workflow: + +
+ + +#### Default variations + +`_50,_NCHW,_default-annotations,_fp32,_mlcommons-reference-preprocessor,_validation` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--dir=value` → `CM_DATASET_PREPROCESSED_PATH=value` +* `--threads=value` → `CM_NUM_PREPROCESS_THREADS=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "dir":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DATASET: `OPENIMAGES` +* CM_DATASET_DTYPE: `fp32` +* CM_DATASET_INPUT_SQUARE_SIDE: `800` +* CM_DATASET_CROP_FACTOR: `100.0` +* CM_DATASET_QUANT_SCALE: `1` +* CM_DATASET_QUANTIZE: `0` +* CM_DATASET_QUANT_OFFSET: `0` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages/_cm.json)*** + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,dataset,object-detection,openimages,original + * CM names: `--adr.['original-dataset']...` + - CM script: [get-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages) + * mlperf,mlcommons,inference,source,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,generic-python-lib,_pycocotools + * CM names: `--adr.['pycocotools']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_opencv-python + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pillow + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.ujson + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_numpy + * CM names: `--adr.['numpy']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_numpy + * CM names: `--adr.['numpy']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. 
***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages/_cm.json) + +___ +### Script output +`cmr "get dataset openimages open-images object-detection preprocessed [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +#### New environment keys auto-detected from customize + +* `CM_DATASET_ANNOTATIONS_DIR_PATH` +* `CM_DATASET_ANNOTATIONS_FILE_PATH` +* `CM_DATASET_PREPROCESSED_IMAGENAMES_LIST` +* `CM_DATASET_PREPROCESSED_IMAGES_LIST` +* `CM_DATASET_PREPROCESSED_PATH` +* `CM_DATASET_QUANT_OFFSET` +* `CM_DATASET_QUANT_SCALE` +* `CM_DATASET_TYPE` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-openorca.md b/docs/AI-ML-datasets/get-preprocessed-dataset-openorca.md new file mode 100644 index 000000000..cd4e07dd9 --- /dev/null +++ b/docs/AI-ML-datasets/get-preprocessed-dataset-openorca.md @@ -0,0 +1,178 @@ +Automatically generated README for this automation recipe: **get-preprocessed-dataset-openorca** + +Category: **AI/ML datasets** 
+ +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-preprocessed-dataset-openorca,5614c39cb1564d72) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openorca)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,openorca,language-processing,preprocessed* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset openorca language-processing preprocessed" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,openorca,language-processing,preprocessed` + +`cm run script --tags=get,dataset,openorca,language-processing,preprocessed[,variations] ` + +*or* + +`cmr "get dataset openorca language-processing preprocessed"` + +`cmr "get dataset openorca language-processing preprocessed [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started 
Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,dataset,openorca,language-processing,preprocessed' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,openorca,language-processing,preprocessed"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset openorca language-processing preprocessed[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**dataset-type**" +
+ Click here to expand this section. + + * `_calibration` + - Environment variables: + - *CM_DATASET_CALIBRATION*: `yes` + - Workflow: + * **`_validation`** (default) + - Environment variables: + - *CM_DATASET_CALIBRATION*: `no` + - Workflow: + +
+ + + * Group "**size**" +
+ Click here to expand this section. + + * **`_60`** (default) + - Workflow: + * `_full` + - Workflow: + * `_size.#` + - Workflow: + +
+ + +#### Default variations + +`_60,_validation` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DATASET_CALIBRATION: `no` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openorca/_cm.json)*** + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,dataset,original,openorca + * CM names: `--adr.['openorca-original', 'dataset-original']...` + - CM script: [get-dataset-openorca](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openorca) + * mlperf,inference,source + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,generic-python-lib,_package.pyarrow + * CM names: `--adr.['pyarrow']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.fastparquet + * CM names: `--adr.['fastparquet']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,ml-model,llama2 + - CM script: [get-ml-model-llama2](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-llama2) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openorca/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openorca/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openorca/run.sh) + 1. 
Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openorca/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openorca/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openorca/_cm.json) + +___ +### Script output +`cmr "get dataset openorca language-processing preprocessed [,variations]" -j` +#### New environment keys (filter) + +* `CM_DATASET_PREPROCESSED_PATH` +#### New environment keys auto-detected from customize + +* `CM_DATASET_PREPROCESSED_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-squad.md b/docs/AI-ML-datasets/get-preprocessed-dataset-squad.md new file mode 100644 index 000000000..c7d80cfd0 --- /dev/null +++ b/docs/AI-ML-datasets/get-preprocessed-dataset-squad.md @@ -0,0 +1,238 @@ +Automatically generated README for this automation recipe: **get-preprocessed-dataset-squad** + +Category: **AI/ML datasets** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-preprocessed-dataset-squad,7cd1d9b7e8af4788) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *get,dataset,preprocessed,tokenized,squad* +* Output cached?
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get dataset preprocessed tokenized squad" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,dataset,preprocessed,tokenized,squad` + +`cm run script --tags=get,dataset,preprocessed,tokenized,squad[,variations] ` + +*or* + +`cmr "get dataset preprocessed tokenized squad"` + +`cmr "get dataset preprocessed tokenized squad [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,dataset,preprocessed,tokenized,squad' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,preprocessed,tokenized,squad"``` + +#### Run this script via Docker (beta) + +`cm docker script "get dataset preprocessed tokenized squad[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**calibration-set**" +
+ Click here to expand this section. + + * `_calib1` + - Environment variables: + - *CM_DATASET_SQUAD_CALIBRATION_SET*: `one` + - Workflow: + * `_calib2` + - Environment variables: + - *CM_DATASET_SQUAD_CALIBRATION_SET*: `two` + - Workflow: + * **`_no-calib`** (default) + - Environment variables: + - *CM_DATASET_SQUAD_CALIBRATION_SET*: `` + - Workflow: + +
+ + + * Group "**doc-stride**" +
+ Click here to expand this section. + + * `_doc-stride.#` + - Environment variables: + - *CM_DATASET_DOC_STRIDE*: `#` + - Workflow: + * **`_doc-stride.128`** (default) + - Environment variables: + - *CM_DATASET_DOC_STRIDE*: `128` + - Workflow: + +
+ + + * Group "**packing**" +
+ Click here to expand this section. + + * `_packed` + - Environment variables: + - *CM_DATASET_SQUAD_PACKED*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,preprocessed,squad,_pickle + - CM script: [get-preprocessed-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-squad) + +
+ + + * Group "**raw**" +
+ Click here to expand this section. + + * `_pickle` + - Environment variables: + - *CM_DATASET_RAW*: `no` + - Workflow: + * **`_raw`** (default) + - Environment variables: + - *CM_DATASET_RAW*: `yes` + - Workflow: + +
+ + + * Group "**seq-length**" +
+ Click here to expand this section. + + * `_seq-length.#` + - Environment variables: + - *CM_DATASET_MAX_SEQ_LENGTH*: `#` + - Workflow: + * **`_seq-length.384`** (default) + - Environment variables: + - *CM_DATASET_MAX_SEQ_LENGTH*: `384` + - Workflow: + +
+ + +#### Default variations + +`_doc-stride.128,_no-calib,_raw,_seq-length.384` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad/_cm.yaml)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlperf,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,squad,dataset,original + * CM names: `--adr.['squad-dataset']...` + - CM script: [get-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad) + * get,squad,vocab + * CM names: `--adr.['squad-vocab']...` + - CM script: [get-bert-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-bert-squad-vocab) + * get,generic-python-lib,_package.tokenization + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.transformers + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.tensorflow + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad/_cm.yaml) + 1. 
***Run native script if exists*** + * [run-packed.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad/run-packed.sh) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad/_cm.yaml) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad/_cm.yaml) + +___ +### Script output +`cmr "get dataset preprocessed tokenized squad [,variations]" -j` +#### New environment keys (filter) + +* `CM_DATASET_SQUAD_TOKENIZED_*` +#### New environment keys auto-detected from customize + +* `CM_DATASET_SQUAD_TOKENIZED_DOC_STRIDE` +* `CM_DATASET_SQUAD_TOKENIZED_INPUT_IDS` +* `CM_DATASET_SQUAD_TOKENIZED_INPUT_MASK` +* `CM_DATASET_SQUAD_TOKENIZED_MAX_QUERY_LENGTH` +* `CM_DATASET_SQUAD_TOKENIZED_MAX_SEQ_LENGTH` +* `CM_DATASET_SQUAD_TOKENIZED_PACKED_FILENAMES_FILE` +* `CM_DATASET_SQUAD_TOKENIZED_PICKLE_FILE` +* `CM_DATASET_SQUAD_TOKENIZED_ROOT` +* `CM_DATASET_SQUAD_TOKENIZED_SEGMENT_IDS` \ No newline at end of file diff --git a/docs/AI-ML-frameworks/get-google-saxml.md b/docs/AI-ML-frameworks/get-google-saxml.md new file mode 100644 index 000000000..5a7e3d351 --- /dev/null +++ b/docs/AI-ML-frameworks/get-google-saxml.md @@ -0,0 +1,133 @@ +Automatically generated README for this automation recipe: **get-google-saxml** + +Category: **AI/ML frameworks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM
script](https://access.cknowledge.org/playground/?action=scripts&name=get-google-saxml,5d7b17d84b5a48fb) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *get,google,saxml* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get google saxml" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,google,saxml` + +`cm run script --tags=get,google,saxml ` + +*or* + +`cmr "get google saxml"` + +`cmr "get google saxml " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,google,saxml' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,google,saxml"``` + +#### Run this script via Docker (beta) + +`cm docker script "get google saxml" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `master` + +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,git,_repo.https://github.com/google/saxml + * CM names: `--adr.['google-saxml-git-src']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + * get,bazel + * CM names: `--adr.['bazel']...` + - CM script: [get-bazel](https://github.com/mlcommons/cm4mlops/tree/master/script/get-bazel) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml/_cm.yaml) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml/_cm.yaml) + +___ +### Script output +`cmr "get google saxml " -j` +#### New environment keys (filter) + +* `CM_GOOGLE_SAXML*` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-frameworks/get-onnxruntime-prebuilt.md b/docs/AI-ML-frameworks/get-onnxruntime-prebuilt.md new file mode 100644 index 000000000..20419da08 --- /dev/null +++ b/docs/AI-ML-frameworks/get-onnxruntime-prebuilt.md @@ -0,0 +1,157 @@ +Automatically generated README for this automation recipe: **get-onnxruntime-prebuilt** + +Category: **AI/ML frameworks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-onnxruntime-prebuilt,be02c84ff57c4244) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,onnxruntime,get,prebuilt,lib,lang-c,lang-cpp* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install onnxruntime get prebuilt lib lang-c lang-cpp" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,onnxruntime,get,prebuilt,lib,lang-c,lang-cpp` + +`cm run script --tags=install,onnxruntime,get,prebuilt,lib,lang-c,lang-cpp[,variations] ` + +*or* + +`cmr "install onnxruntime get prebuilt lib lang-c lang-cpp"` + +`cmr "install onnxruntime get prebuilt lib lang-c lang-cpp [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'install,onnxruntime,get,prebuilt,lib,lang-c,lang-cpp' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,onnxruntime,get,prebuilt,lib,lang-c,lang-cpp"``` + +#### Run this script via Docker (beta) + +`cm docker script "install onnxruntime get prebuilt lib lang-c lang-cpp[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *CM_ONNXRUNTIME_DEVICE*: `` + - Workflow: + * `_cuda` + - Environment variables: + - *CM_ONNXRUNTIME_DEVICE*: `gpu` + - Workflow: + +
+ + +#### Default variations + +`_cpu` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `1.16.3` + +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt/_cm.json) + +___ +### Script output +`cmr "install onnxruntime get prebuilt lib lang-c lang-cpp [,variations]" -j` +#### New environment keys (filter) + +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+DYLD_FALLBACK_LIBRARY_PATH` +* `+LD_LIBRARY_PATH` +* `+PATH` +* `CM_ONNXRUNTIME_INCLUDE_PATH` +* `CM_ONNXRUNTIME_LIB_PATH` +#### New environment keys auto-detected from customize + +* `CM_ONNXRUNTIME_INCLUDE_PATH` +* `CM_ONNXRUNTIME_LIB_PATH` \ No newline at end of file diff --git a/docs/AI-ML-frameworks/get-qaic-apps-sdk.md b/docs/AI-ML-frameworks/get-qaic-apps-sdk.md new file mode 100644 index 000000000..836595396 --- /dev/null +++ b/docs/AI-ML-frameworks/get-qaic-apps-sdk.md @@ -0,0 +1,124 @@ +Automatically generated README for this automation recipe: **get-qaic-apps-sdk** + +Category: **AI/ML frameworks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-qaic-apps-sdk,0a9e206af6764da9) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-apps-sdk)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,detect,qaic,apps,sdk,apps-sdk,qaic-apps-sdk* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get detect qaic apps sdk apps-sdk qaic-apps-sdk" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,detect,qaic,apps,sdk,apps-sdk,qaic-apps-sdk` + +`cm run script --tags=get,detect,qaic,apps,sdk,apps-sdk,qaic-apps-sdk ` + +*or* + +`cmr "get detect qaic apps sdk apps-sdk qaic-apps-sdk"` + +`cmr "get detect qaic apps sdk apps-sdk qaic-apps-sdk " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,detect,qaic,apps,sdk,apps-sdk,qaic-apps-sdk' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,detect,qaic,apps,sdk,apps-sdk,qaic-apps-sdk"``` + +#### Run this script via Docker (beta) + +`cm docker script "get detect qaic apps sdk apps-sdk qaic-apps-sdk" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-apps-sdk/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-apps-sdk/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-apps-sdk/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-apps-sdk/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-apps-sdk/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-apps-sdk/_cm.json) + +___ +### Script output +`cmr "get detect qaic apps sdk apps-sdk qaic-apps-sdk " -j` +#### New environment keys (filter) + +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+DYLD_FALLBACK_LIBRARY_PATH` +* `+LD_LIBRARY_PATH` +* `+PATH` +* `CM_QAIC_EXEC_PATH` +#### New environment keys auto-detected from customize + +* `CM_QAIC_EXEC_PATH` \ No newline at end of file diff --git a/docs/AI-ML-frameworks/get-qaic-platform-sdk.md b/docs/AI-ML-frameworks/get-qaic-platform-sdk.md new file mode 100644 index 000000000..f712c9859 --- /dev/null +++ b/docs/AI-ML-frameworks/get-qaic-platform-sdk.md @@ -0,0 +1,128 @@ +Automatically generated README for this automation recipe: **get-qaic-platform-sdk** + +Category: **AI/ML frameworks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-qaic-platform-sdk,a60f86918dc9457d) ]* + +--- +#### 
Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-platform-sdk)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,detect,qaic,platform,sdk,platform-sdk,qaic-platform-sdk* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get detect qaic platform sdk platform-sdk qaic-platform-sdk" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,detect,qaic,platform,sdk,platform-sdk,qaic-platform-sdk` + +`cm run script --tags=get,detect,qaic,platform,sdk,platform-sdk,qaic-platform-sdk ` + +*or* + +`cmr "get detect qaic platform sdk platform-sdk qaic-platform-sdk"` + +`cmr "get detect qaic platform sdk platform-sdk qaic-platform-sdk " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,detect,qaic,platform,sdk,platform-sdk,qaic-platform-sdk' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,detect,qaic,platform,sdk,platform-sdk,qaic-platform-sdk"``` + +#### Run this script via Docker (beta) + +`cm docker script "get detect qaic platform sdk platform-sdk qaic-platform-sdk" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-platform-sdk/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-platform-sdk/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-platform-sdk/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-platform-sdk/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-platform-sdk/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-platform-sdk/_cm.json) + +___ +### Script output +`cmr "get detect qaic platform sdk platform-sdk qaic-platform-sdk " -j` +#### New environment keys (filter) + +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+DYLD_FALLBACK_LIBRARY_PATH` +* `+LD_LIBRARY_PATH` +* `+PATH` +* `CM_QAIC_RUNNER_PATH` +* `CM_QAIC_TOOLS_PATH` +#### New environment keys auto-detected from customize + +* `CM_QAIC_RUNNER_PATH` +* `CM_QAIC_TOOLS_PATH` \ No newline at end of file diff --git a/docs/AI-ML-frameworks/get-qaic-software-kit.md b/docs/AI-ML-frameworks/get-qaic-software-kit.md new file mode 100644 index 000000000..62ab27a7c --- /dev/null +++ b/docs/AI-ML-frameworks/get-qaic-software-kit.md @@ -0,0 +1,176 @@ +Automatically generated README for this automation recipe: **get-qaic-software-kit** + +Category: **AI/ML frameworks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and 
Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-qaic-software-kit,3344655922694bbb) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-software-kit)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,qaic,software,kit,qaic-software-kit* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get qaic software kit qaic-software-kit" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,qaic,software,kit,qaic-software-kit` + +`cm run script --tags=get,qaic,software,kit,qaic-software-kit[,variations] ` + +*or* + +`cmr "get qaic software kit qaic-software-kit"` + +`cmr "get qaic software kit qaic-software-kit [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,qaic,software,kit,qaic-software-kit',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,qaic,software,kit,qaic-software-kit"``` + +#### Run this script via Docker (beta) + +`cm docker script "get qaic software kit qaic-software-kit[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + +
+ + + * Group "**repo-source**" +
+ Click here to expand this section. + + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + * **`_repo.quic`** (default) + - Environment variables: + - *CM_GIT_URL*: `https://github.com/quic/software-kit-for-qualcomm-cloud-ai-100` + - Workflow: + +
+ + +#### Default variations + +`_repo.quic` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-software-kit/_cm.json)*** + * get,git,repo + * CM names: `--adr.['qaic-software-git-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + * get,generic,sys-util,_libudev-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_libpci-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,google,test + - CM script: [get-google-test](https://github.com/mlcommons/cm4mlops/tree/master/script/get-google-test) + * get,cmake + * CM names: `--adr.['cmake']...` + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * get,compiler + * CM names: `--adr.['compiler']...` + - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-software-kit/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-software-kit/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-software-kit/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-software-kit/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-software-kit/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-software-kit/_cm.json) + +___ +### Script output +`cmr "get qaic software kit qaic-software-kit [,variations]" -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_QAIC_RUNNER_PATH` +* `CM_QAIC_SOFTWARE_KIT_PATH` +#### New environment keys auto-detected from customize + +* `CM_QAIC_RUNNER_PATH` +* `CM_QAIC_SOFTWARE_KIT_PATH` \ No newline at end of file diff --git a/docs/AI-ML-frameworks/get-rocm.md b/docs/AI-ML-frameworks/get-rocm.md new file mode 100644 index 000000000..ed5e7b629 --- /dev/null +++ b/docs/AI-ML-frameworks/get-rocm.md @@ -0,0 +1,126 @@ +Automatically generated README for this automation recipe: **get-rocm** + +Category: **AI/ML frameworks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-rocm,23a69f9477cb4dab) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rocm)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,rocm,get-rocm* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get rocm get-rocm" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,rocm,get-rocm` + +`cm run script --tags=get,rocm,get-rocm ` + +*or* + +`cmr "get rocm get-rocm"` + +`cmr "get rocm get-rocm " ` + + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,rocm,get-rocm',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,rocm,get-rocm"``` + +#### Run this script via Docker (beta) + +`cm docker script "get rocm get-rocm" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rocm/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rocm/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rocm/_cm.json)*** + * install,rocm + * Enable this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [install-rocm](https://github.com/mlcommons/cm4mlops/tree/master/script/install-rocm) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rocm/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rocm/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rocm/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rocm/_cm.json) + +___ +### Script output +`cmr "get rocm get-rocm " -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_ROCM_*` +#### New environment keys auto-detected from customize + +* `CM_ROCM_CACHE_TAGS` +* `CM_ROCM_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/AI-ML-frameworks/get-tvm.md b/docs/AI-ML-frameworks/get-tvm.md new file mode 100644 index 000000000..af40c0419 --- /dev/null +++ b/docs/AI-ML-frameworks/get-tvm.md @@ -0,0 +1,198 @@ +Automatically generated README for this automation recipe: **get-tvm** + +Category: **AI/ML frameworks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-tvm,93c89140e6224f4b) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,tvm,get-tvm* +* 
Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get tvm get-tvm" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,tvm,get-tvm` + +`cm run script --tags=get,tvm,get-tvm[,variations] ` + +*or* + +`cmr "get tvm get-tvm"` + +`cmr "get tvm get-tvm [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,tvm,get-tvm',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,tvm,get-tvm"``` + +#### Run this script via Docker (beta) + +`cm docker script "get tvm get-tvm[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_cuda` + - Environment variables: + - *CM_TVM_USE_CUDA*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cuda + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_openmp` + - Environment variables: + - *CM_TVM_USE_OPENMP*: `yes` + - Workflow: + +
+ + + * Group "**installation-type**" +
+ Click here to expand this section. + + * **`_llvm`** (default) + - Environment variables: + - *CM_TVM_USE_LLVM*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,llvm + * CM names: `--adr.['llvm']...` + - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) + * `_pip-install` + - Environment variables: + - *CM_TVM_PIP_INSTALL*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_apache-tvm + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + +#### Default variations + +`_llvm` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_CHECKOUT: `main` +* CM_GIT_URL: `https://github.com/apache/tvm` +* CM_TVM_PIP_INSTALL: `no` + +
+ +#### Versions +* `main` +* `v0.10.0` +* `v0.7.0` +* `v0.8.0` +* `v0.9.0` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm/_cm.json)*** + * cmake,get-cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,generic-python-lib,_typing_extensions + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_decorator + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_scipy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_attrs + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_psutil + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm/_cm.json) + +___ +### Script output +`cmr "get tvm get-tvm [,variations]" -j` +#### New environment keys (filter) + +* `+DYLD_FALLBACK_LIBRARY_PATH` +* `+LD_LIBRARY_PATH` +* `+PYTHONPATH` +* `CM_TVM_*` +* `TVM_HOME` +#### New environment keys auto-detected from customize + +* `CM_TVM_PATH_INCLUDE` +* `CM_TVM_PATH_LIB` \ No newline at end of file diff --git a/docs/AI-ML-frameworks/install-qaic-compute-sdk-from-src.md b/docs/AI-ML-frameworks/install-qaic-compute-sdk-from-src.md new file mode 100644 index 000000000..b8895826d --- /dev/null +++ b/docs/AI-ML-frameworks/install-qaic-compute-sdk-from-src.md @@ -0,0 +1,199 @@ +Automatically generated README for this automation recipe: **install-qaic-compute-sdk-from-src** + +Category: **AI/ML frameworks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-qaic-compute-sdk-from-src,9701bdda97fa4045) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-qaic-compute-sdk-from-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,qaic,from.src,software,compute,compute-sdk,qaic-compute-sdk,sdk* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get qaic from.src software compute compute-sdk qaic-compute-sdk sdk" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,qaic,from.src,software,compute,compute-sdk,qaic-compute-sdk,sdk` + +`cm run script --tags=get,qaic,from.src,software,compute,compute-sdk,qaic-compute-sdk,sdk[,variations] ` + +*or* + +`cmr "get qaic from.src software compute compute-sdk qaic-compute-sdk sdk"` + +`cmr "get qaic from.src software compute compute-sdk qaic-compute-sdk sdk [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,qaic,from.src,software,compute,compute-sdk,qaic-compute-sdk,sdk',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,qaic,from.src,software,compute,compute-sdk,qaic-compute-sdk,sdk"``` + +#### Run this script via Docker (beta) + +`cm docker script "get qaic from.src software compute compute-sdk qaic-compute-sdk sdk[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + +
+ + + * Group "**installation-mode**" +
+ Click here to expand this section. + + * `_debug` + - Environment variables: + - *CM_QAIC_COMPUTE_SDK_INSTALL_MODE*: `debug` + - Workflow: + * **`_release`** (default) + - Environment variables: + - *CM_QAIC_COMPUTE_SDK_INSTALL_MODE*: `release` + - Workflow: + * `_release-assert` + - Environment variables: + - *CM_QAIC_COMPUTE_SDK_INSTALL_MODE*: `release-assert` + - Workflow: + +
+ + + * Group "**repo-source**" +
+ Click here to expand this section. + + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + * **`_repo.quic`** (default) + - Environment variables: + - *CM_GIT_URL*: `https://github.com/quic/software-kit-for-qualcomm-cloud-ai-100-cc` + - Workflow: + +
+ + +#### Default variations + +`_release,_repo.quic` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-qaic-compute-sdk-from-src/_cm.json)*** + * get,git,repo,_repo.https://github.com/quic/software-kit-for-qualcomm-cloud-ai-100-cc + * CM names: `--adr.['qaic-software-git-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + * get,cmake + * CM names: `--adr.['cmake']...` + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * get,llvm,_from-src + * CM names: `--adr.['llvm']...` + - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) + * get,generic,sys-util,_libudev-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_libpci-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,google,test + - CM script: [get-google-test](https://github.com/mlcommons/cm4mlops/tree/master/script/get-google-test) + * get,generic-sys-util,_ninja-build + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic-sys-util,_rsync + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * download-and-extract,_extract,_url.https://codelinaro.jfrog.io/artifactory/codelinaro-toolchain-for-hexagon/v15.0.5/clang+llvm-15.0.5-cross-hexagon-unknown-linux-musl.tar.xz + * CM names: `--adr.['dae']...` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-qaic-compute-sdk-from-src/customize.py)*** + 1. 
Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-qaic-compute-sdk-from-src/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-qaic-compute-sdk-from-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-qaic-compute-sdk-from-src/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-qaic-compute-sdk-from-src/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-qaic-compute-sdk-from-src/_cm.json) + +___ +### Script output +`cmr "get qaic from.src software compute compute-sdk qaic-compute-sdk sdk [,variations]" -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_QAIC_COMPUTE_SDK_PATH` +#### New environment keys auto-detected from customize + +* `CM_QAIC_COMPUTE_SDK_PATH` \ No newline at end of file diff --git a/docs/AI-ML-frameworks/install-rocm.md b/docs/AI-ML-frameworks/install-rocm.md new file mode 100644 index 000000000..019cd2cd6 --- /dev/null +++ b/docs/AI-ML-frameworks/install-rocm.md @@ -0,0 +1,129 @@ +Automatically generated README for this automation recipe: **install-rocm** + +Category: **AI/ML frameworks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-rocm,9d13f90463ce4545) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm)* +* CM meta description for this 
script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,rocm,install-rocm* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install rocm install-rocm" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,rocm,install-rocm` + +`cm run script --tags=install,rocm,install-rocm ` + +*or* + +`cmr "install rocm install-rocm"` + +`cmr "install rocm install-rocm " ` + + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'install,rocm,install-rocm',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,rocm,install-rocm"``` + +#### Run this script via Docker (beta) + +`cm docker script "install rocm install-rocm" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `5.7.1` + +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/_cm.json) + 1. ***Run native script if exists*** + * [run-rhel.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/run-rhel.sh) + * [run-ubuntu.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/run-ubuntu.sh) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/_cm.json) + +___ +### Script output +`cmr "install rocm install-rocm " -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_ROCM_*` +#### New environment keys auto-detected from customize + +* `CM_ROCM_BIN_WITH_PATH` +* `CM_ROCM_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/AI-ML-frameworks/install-tensorflow-for-c.md b/docs/AI-ML-frameworks/install-tensorflow-for-c.md new file mode 100644 index 000000000..845aae451 --- /dev/null +++ b/docs/AI-ML-frameworks/install-tensorflow-for-c.md @@ -0,0 +1,122 @@ +Automatically generated README for this automation recipe: **install-tensorflow-for-c** + +Category: **AI/ML frameworks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-tensorflow-for-c,d73783d8302547d7) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-for-c)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,tensorflow,lib,lang-c* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install tensorflow lib lang-c" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,tensorflow,lib,lang-c` + +`cm run script --tags=install,tensorflow,lib,lang-c ` + +*or* + +`cmr "install tensorflow lib lang-c"` + +`cmr "install tensorflow lib lang-c " ` + + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'install,tensorflow,lib,lang-c',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,tensorflow,lib,lang-c"``` + +#### Run this script via Docker (beta) + +`cm docker script "install tensorflow lib lang-c" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `2.8.0` + +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-for-c/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-for-c/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-for-c/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-for-c/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-for-c/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-for-c/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-for-c/_cm.json) + +___ +### Script output +`cmr "install tensorflow lib lang-c " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-frameworks/install-tensorflow-from-src.md b/docs/AI-ML-frameworks/install-tensorflow-from-src.md new file mode 100644 index 000000000..4421e0df6 --- /dev/null +++ b/docs/AI-ML-frameworks/install-tensorflow-from-src.md @@ -0,0 +1,165 @@ +Automatically generated README for this automation recipe: **install-tensorflow-from-src** + +Category: **AI/ML frameworks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-tensorflow-from-src,a974533c4c854597) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-from-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,install,tensorflow,lib,source,from-source,from-src,src,from.src* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get install tensorflow lib source from-source from-src src from.src" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,install,tensorflow,lib,source,from-source,from-src,src,from.src` + +`cm run script --tags=get,install,tensorflow,lib,source,from-source,from-src,src,from.src[,variations] ` + +*or* + +`cmr "get install tensorflow lib source from-source from-src src from.src"` + +`cmr "get install tensorflow lib source from-source from-src src from.src [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,install,tensorflow,lib,source,from-source,from-src,src,from.src',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,install,tensorflow,lib,source,from-source,from-src,src,from.src"``` + +#### Run this script via Docker (beta) + +`cm docker script "get install tensorflow lib source from-source from-src src from.src[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_tflite` + - Environment variables: + - *CM_TFLITE*: `on` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_URL: `https://github.com/tensorflow/tensorflow` +* CM_GIT_DEPTH: `1` +* CM_TFLITE: `off` + +
+ +#### Versions +Default version: `master` + +* `master` +* `v1.15.0` +* `v2.0.0` +* `v2.1.0` +* `v2.2.0` +* `v2.3.0` +* `v2.4.0` +* `v2.5.0` +* `v2.6.0` +* `v2.7.0` +* `v2.8.0` +* `v2.9.0` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-from-src/_cm.json)*** + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,generic-sys-util,_zlib + * Enable this dependency only if all ENV vars are set:
+`{'CM_HOST_OS_FLAVOR': ['ubuntu'], 'CM_HOST_OS_VERSION': ['18.04']}` + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic-python-lib,_package.numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-from-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-from-src/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-from-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-from-src/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-from-src/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-from-src/_cm.json) + +___ +### Script output +`cmr "get install tensorflow lib source from-source from-src src from.src [,variations]" -j` +#### New environment keys (filter) + +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+DYLD_FALLBACK_LIBRARY_PATH` +* `+LD_LIBRARY_PATH` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-frameworks/install-tflite-from-src.md b/docs/AI-ML-frameworks/install-tflite-from-src.md new file mode 100644 index 000000000..aa40f96eb --- /dev/null +++ b/docs/AI-ML-frameworks/install-tflite-from-src.md @@ -0,0 +1,135 @@ +Automatically generated README for this automation recipe: **install-tflite-from-src** + +Category: **AI/ML frameworks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-tflite-from-src,5c72dab5eb88407c) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tflite-from-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,install,tflite-cmake,tensorflow-lite-cmake,from-src* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get install tflite-cmake tensorflow-lite-cmake from-src" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,install,tflite-cmake,tensorflow-lite-cmake,from-src` + +`cm run script --tags=get,install,tflite-cmake,tensorflow-lite-cmake,from-src ` + +*or* + +`cmr "get install tflite-cmake tensorflow-lite-cmake from-src"` + +`cmr "get install tflite-cmake tensorflow-lite-cmake from-src " ` + + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,install,tflite-cmake,tensorflow-lite-cmake,from-src',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,install,tflite-cmake,tensorflow-lite-cmake,from-src"``` + +#### Run this script via Docker (beta) + +`cm docker script "get install tflite-cmake tensorflow-lite-cmake from-src" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_DEPTH: `1` + +
+ +#### Versions +Default version: `master` + +* `master` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tflite-from-src/_cm.json)*** + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,compiler + * CM names: `--adr.['compiler']...` + - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) + * get,cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tflite-from-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tflite-from-src/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tflite-from-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tflite-from-src/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tflite-from-src/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tflite-from-src/_cm.json) + +___ +### Script output +`cmr "get install tflite-cmake tensorflow-lite-cmake from-src " -j` +#### New environment keys (filter) + +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+DYLD_FALLBACK_LIBRARY_PATH` +* `+LD_LIBRARY_PATH` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-models/convert-ml-model-huggingface-to-onnx.md b/docs/AI-ML-models/convert-ml-model-huggingface-to-onnx.md new file mode 100644 index 000000000..4c409f992 --- /dev/null +++ b/docs/AI-ML-models/convert-ml-model-huggingface-to-onnx.md @@ -0,0 +1,143 @@ +Automatically generated README for this automation recipe: **convert-ml-model-huggingface-to-onnx** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=convert-ml-model-huggingface-to-onnx,eacb01655d7e49ac) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-ml-model-huggingface-to-onnx)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *ml-model,model,huggingface-to-onnx,onnx,huggingface,convert* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "ml-model model huggingface-to-onnx onnx huggingface convert" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=ml-model,model,huggingface-to-onnx,onnx,huggingface,convert` + +`cm run script --tags=ml-model,model,huggingface-to-onnx,onnx,huggingface,convert[,variations] ` + +*or* + +`cmr "ml-model model huggingface-to-onnx onnx huggingface convert"` + +`cmr "ml-model model huggingface-to-onnx onnx huggingface convert [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'ml-model,model,huggingface-to-onnx,onnx,huggingface,convert',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="ml-model,model,huggingface-to-onnx,onnx,huggingface,convert"``` + +#### Run this script via Docker (beta) + +`cm docker script "ml-model model huggingface-to-onnx onnx huggingface convert[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_model-path.#` + - Environment variables: + - *CM_MODEL_HUGG_PATH*: `#` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-ml-model-huggingface-to-onnx/_cm.json)*** + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_transformers + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnxruntime + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-ml-model-huggingface-to-onnx/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-ml-model-huggingface-to-onnx/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-ml-model-huggingface-to-onnx/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-ml-model-huggingface-to-onnx/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-ml-model-huggingface-to-onnx/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-ml-model-huggingface-to-onnx/_cm.json) + +___ +### Script output +`cmr "ml-model model huggingface-to-onnx onnx huggingface convert [,variations]" -j` +#### New environment keys (filter) + +* `CM_ML_MODEL*` +* `CM_MODEL_HUGG_PATH` +* `HUGGINGFACE_ONNX_FILE_PATH` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-models/get-bert-squad-vocab.md b/docs/AI-ML-models/get-bert-squad-vocab.md new file mode 100644 index 000000000..3067bcb2e --- /dev/null +++ b/docs/AI-ML-models/get-bert-squad-vocab.md @@ -0,0 +1,119 @@ +Automatically generated README for this automation recipe: **get-bert-squad-vocab** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-bert-squad-vocab,2f99a545ce734157) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bert-squad-vocab)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,bert,squad,bert-large,bert-squad,vocab* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get bert squad bert-large bert-squad vocab" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,bert,squad,bert-large,bert-squad,vocab` + +`cm run script --tags=get,bert,squad,bert-large,bert-squad,vocab ` + +*or* + +`cmr "get bert squad bert-large bert-squad vocab"` + +`cmr "get bert squad bert-large bert-squad vocab " ` + + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,bert,squad,bert-large,bert-squad,vocab',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,bert,squad,bert-large,bert-squad,vocab"``` + +#### Run this script via Docker (beta) + +`cm docker script "get bert squad bert-large bert-squad vocab" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bert-squad-vocab/_cm.json) + 1. Run "preprocess" function from customize.py + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bert-squad-vocab/_cm.json)*** + * download,file + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bert-squad-vocab/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bert-squad-vocab/_cm.json) + +___ +### Script output +`cmr "get bert squad bert-large bert-squad vocab " -j` +#### New environment keys (filter) + +* `CM_ML_MODEL_BERT_VOCAB_FILE_WITH_PATH` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-models/get-dlrm.md b/docs/AI-ML-models/get-dlrm.md new file mode 100644 index 000000000..9bb81a69a --- /dev/null +++ b/docs/AI-ML-models/get-dlrm.md @@ -0,0 +1,143 @@ +Automatically generated README for this automation recipe: **get-dlrm** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dlrm,63680ac2449a4241) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dlrm)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,src,dlrm* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get src dlrm" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,src,dlrm` + +`cm run script --tags=get,src,dlrm[,variations] ` + +*or* + +`cmr "get src dlrm"` + +`cmr "get src dlrm [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,src,dlrm',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,src,dlrm"``` + +#### Run this script via Docker (beta) + +`cm docker script "get src dlrm[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_full-history` + - Environment variables: + - *CM_GIT_DEPTH*: `` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_DEPTH: `--depth 10` +* CM_GIT_PATCH: `no` +* CM_GIT_URL: `https://github.com/facebookresearch/dlrm.git` + +
+ +#### Versions +Default version: `main` + +* `main` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dlrm/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dlrm/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dlrm/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dlrm/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dlrm/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dlrm/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dlrm/_cm.json) + +___ +### Script output +`cmr "get src dlrm [,variations]" -j` +#### New environment keys (filter) + +* `DLRM_DIR` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-models/get-ml-model-3d-unet-kits19.md b/docs/AI-ML-models/get-ml-model-3d-unet-kits19.md new file mode 100644 index 000000000..1ae4ae572 --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-3d-unet-kits19.md @@ -0,0 +1,200 @@ +Automatically generated README for this automation recipe: **get-ml-model-3d-unet-kits19** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-3d-unet-kits19,fb7e31419c0f4226) ]* + 
+--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-3d-unet-kits19)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,raw,3d-unet,kits19,medical-imaging* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ml-model raw 3d-unet kits19 medical-imaging" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ml-model,raw,3d-unet,kits19,medical-imaging` + +`cm run script --tags=get,ml-model,raw,3d-unet,kits19,medical-imaging[,variations] ` + +*or* + +`cmr "get ml-model raw 3d-unet kits19 medical-imaging"` + +`cmr "get ml-model raw 3d-unet kits19 medical-imaging [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,ml-model,raw,3d-unet,kits19,medical-imaging',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model,raw,3d-unet,kits19,medical-imaging"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model raw 3d-unet kits19 medical-imaging[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_onnx,fp32` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `0.86170` + - *CM_PACKAGE_URL*: `https://zenodo.org/record/5597155/files/3dunet_kits19_128x128x128_dynbatch.onnx?download=1` + - Workflow: + * `_pytorch,fp32` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `0.86170` + - *CM_PACKAGE_URL*: `https://zenodo.org/record/5597155/files/3dunet_kits19_pytorch.ptc?download=1` + - Workflow: + * `_pytorch,fp32,weights` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `0.86170` + - *CM_ML_MODEL_FILE*: `retinanet_model_10.pth` + - *CM_PACKAGE_URL*: `https://zenodo.org/record/5597155/files/3dunet_kits19_pytorch_checkpoint.pth?download=1` + - *CM_UNZIP*: `yes` + - Workflow: + * `_tf,fp32` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `0.86170` + - *CM_ML_MODEL_FILE*: `3dunet_kits19_128x128x128.tf` + - *CM_PACKAGE_URL*: `https://zenodo.org/record/5597155/files/3dunet_kits19_128x128x128.tf.zip?download=1` + - *CM_UNZIP*: `yes` + - Workflow: + * `_weights` + - Environment variables: + - *CM_MODEL_WEIGHTS_FILE*: `yes` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * **`_onnx`** (default) + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `onnx` + - Workflow: + * `_pytorch` + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `pytorch` + - Workflow: + * `_tf` + - Aliases: `_tensorflow` + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `tensorflow` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` + - *CM_ML_MODEL_PRECISION*: `fp32` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` + - Workflow: + +
+ + +#### Default variations + +`_fp32,_onnx` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-3d-unet-kits19/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-3d-unet-kits19/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-3d-unet-kits19/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-3d-unet-kits19/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-3d-unet-kits19/_cm.json) + +___ +### Script output +`cmr "get ml-model raw 3d-unet kits19 medical-imaging [,variations]" -j` +#### New environment keys (filter) + +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_FILE` +* `CM_ML_MODEL_FILE_WITH_PATH` +* `CM_ML_MODEL_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-bert-base-squad.md b/docs/AI-ML-models/get-ml-model-bert-base-squad.md new file mode 100644 index 000000000..28bc15a1b --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-bert-base-squad.md @@ -0,0 +1,183 @@ +Automatically generated README for this automation recipe: **get-ml-model-bert-base-squad** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-bert-base-squad,b3b10b452ce24c5f) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-base-squad)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,raw,bert,bert-base,bert-squad,language,language-processing* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ml-model raw bert bert-base bert-squad language language-processing" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ml-model,raw,bert,bert-base,bert-squad,language,language-processing` + +`cm run script --tags=get,ml-model,raw,bert,bert-base,bert-squad,language,language-processing[,variations] ` + +*or* + +`cmr "get ml-model raw bert bert-base bert-squad language language-processing"` + +`cmr "get ml-model raw bert bert-base bert-squad language language-processing [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,ml-model,raw,bert,bert-base,bert-squad,language,language-processing',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model,raw,bert,bert-base,bert-squad,language,language-processing"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model raw bert bert-base bert-squad language language-processing[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_deepsparse,int8` + - Environment variables: + - *CM_ML_MODEL_F1*: `87.89` + - *CM_ML_MODEL_FILE*: `model.onnx` + - *CM_PRUNING_PERCENTAGE*: `95` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,ml-model,zoo,deepsparse,_pruned95_obs_quant-none + * CM names: `--adr.['neural-magic-zoo-downloader']...` + - *Warning: no scripts found* + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * `_deepsparse` + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `deepsparse` + - *CM_ML_MODEL_INPUT_IDS_NAME*: `input_ids` + - *CM_ML_MODEL_INPUT_MASK_NAME*: `input_mask` + - *CM_ML_MODEL_INPUT_SEGMENTS_NAME*: `segment_ids` + - *CM_ML_MODEL_OUTPUT_END_LOGITS_NAME*: `output_end_logits` + - *CM_ML_MODEL_OUTPUT_START_LOGITS_NAME*: `output_start_logits` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_ML_MODEL_PRECISION*: `fp32` + - Workflow: + * `_int8` + - Environment variables: + - *CM_ML_MODEL_PRECISION*: `int8` + - *CM_ML_MODEL_QUANTIZED*: `yes` + - Workflow: + +
+ + +#### Default variations + +`_fp32` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-base-squad/_cm.json) + 1. Run "preprocess" function from customize.py + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-base-squad/_cm.json)*** + * download-and-extract + * Enable this dependency only if all ENV vars are set:
+`{'CM_TMP_ML_MODEL_REQUIRE_DOWNLOAD': 'yes'}` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-base-squad/_cm.json) + 1. Run "postrocess" function from customize.py + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-base-squad/_cm.json)*** + * get,bert,squad,vocab + - CM script: [get-bert-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-bert-squad-vocab) + +___ +### Script output +`cmr "get ml-model raw bert bert-base bert-squad language language-processing [,variations]" -j` +#### New environment keys (filter) + +* `CM_ML_MODEL*` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-models/get-ml-model-bert-large-squad.md b/docs/AI-ML-models/get-ml-model-bert-large-squad.md new file mode 100644 index 000000000..df467b7a4 --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-bert-large-squad.md @@ -0,0 +1,357 @@ +Automatically generated README for this automation recipe: **get-ml-model-bert-large-squad** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-bert-large-squad,5e865dbdc65949d2) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-large-squad)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse 
this script (see in above meta description): *get,ml-model,raw,bert,bert-large,bert-squad,language,language-processing* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ml-model raw bert bert-large bert-squad language language-processing" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ml-model,raw,bert,bert-large,bert-squad,language,language-processing` + +`cm run script --tags=get,ml-model,raw,bert,bert-large,bert-squad,language,language-processing[,variations] ` + +*or* + +`cmr "get ml-model raw bert bert-large bert-squad language language-processing"` + +`cmr "get ml-model raw bert bert-large bert-squad language language-processing [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,ml-model,raw,bert,bert-large,bert-squad,language,language-processing', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model,raw,bert,bert-large,bert-squad,language,language-processing"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model raw bert bert-large bert-squad language language-processing[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_deepsparse,int8` + - Environment variables: + - *CM_ML_MODEL_F1*: `90.21282641816266` + - *CM_ML_MODEL_FILE*: `oBERT-Large_95sparse_block4_qat.onnx` + - *CM_DAE_EXTRACT_DOWNLOADED*: `yes` + - Workflow: + * `_deepsparse,int8,github` + - Environment variables: + - *CM_PACKAGE_URL*: `https://github.com/mlcommons/inference_results_v2.1/raw/master/open/NeuralMagic/code/bert/deepsparse/models/oBERT-Large_95sparse_block4_qat.onnx.tar.xz` + - Workflow: + * `_onnx,fp32` + - Environment variables: + - *CM_ML_MODEL_F1*: `90.874` + - Workflow: + * `_onnx,fp32,armi` + - Environment variables: + - *CM_PACKAGE_URL*: `https://armi.in/files/model.onnx` + - *CM_PACKAGE_URL1*: `https://zenodo.org/record/3733910/files/model.onnx` + - Workflow: + * `_onnx,fp32,zenodo` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/3733910/files/model.onnx` + - Workflow: + * `_onnx,int8` + - Environment variables: + - *CM_ML_MODEL_F1*: `90.067` + - *CM_PACKAGE_URL*: `https://zenodo.org/record/3750364/files/bert_large_v1_1_fake_quant.onnx` + - Workflow: + * `_onnx,int8,amazon-s3` + - Environment variables: + - *CM_PACKAGE_URL*: `https://mlperf-public.s3.us-west-2.amazonaws.com/bert_large_v1_1_fake_quant.onnx` + - Workflow: + * `_onnx,int8,zenodo` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/3750364/files/bert_large_v1_1_fake_quant.onnx` + - Workflow: + * `_onnxruntime` + - Workflow: + * `_pytorch,fp32` + - Environment variables: + - *CM_ML_MODEL_F1*: `90.874` + - *CM_DOWNLOAD_CHECKSUM*: `00fbcbfaebfa20d87ac9885120a6e9b4` + - Workflow: + * `_pytorch,fp32,armi` + - Environment variables: + - *CM_PACKAGE_URL*: `https://armi.in/files/fp32/model.pytorch` + - *CM_PACKAGE_URL1*: `https://zenodo.org/record/3733896/files/model.pytorch` + - Workflow: + * `_pytorch,fp32,zenodo` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/3733896/files/model.pytorch` + - Workflow: + * 
`_pytorch,int8` + - Environment variables: + - *CM_ML_MODEL_F1*: `90.633` + - Workflow: + * `_pytorch,int8,armi` + - Environment variables: + - *CM_PACKAGE_URL*: `https://armi.in/files/int8/pytorch_model.bin` + - *CM_PACKAGE_URL1*: `https://zenodo.org/record/4792496/files/pytorch_model.bin` + - Workflow: + * `_pytorch,int8,zenodo` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/4792496/files/pytorch_model.bin` + - Workflow: + * `_tensorflow` + - Workflow: + * `_tf,fp32` + - Environment variables: + - *CM_ML_MODEL_F1*: `90.874` + - Workflow: + * `_tf,fp32,zenodo` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/3939747/files/model.pb` + - Workflow: + +
+ + + * Group "**download-source**" +
+ Click here to expand this section. + + * `_amazon-s3` + - Workflow: + * `_armi` + - Workflow: + * `_custom-url.#` + - Environment variables: + - *CM_PACKAGE_URL*: `#` + - Workflow: + * `_github` + - Workflow: + * `_zenodo` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * `_deepsparse` + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `deepsparse` + - *CM_ML_MODEL_INPUT_IDS_NAME*: `input_ids` + - *CM_ML_MODEL_INPUT_MASK_NAME*: `input_mask` + - *CM_ML_MODEL_INPUT_SEGMENTS_NAME*: `segment_ids` + - *CM_ML_MODEL_OUTPUT_END_LOGITS_NAME*: `output_end_logits` + - *CM_ML_MODEL_OUTPUT_START_LOGITS_NAME*: `output_start_logits` + - Workflow: + * **`_onnx`** (default) + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `onnx` + - *CM_ML_MODEL_INPUT_IDS_NAME*: `input_ids` + - *CM_ML_MODEL_INPUT_MASK_NAME*: `input_mask` + - *CM_ML_MODEL_INPUT_SEGMENTS_NAME*: `segment_ids` + - *CM_ML_MODEL_OUTPUT_END_LOGITS_NAME*: `output_end_logits` + - *CM_ML_MODEL_OUTPUT_START_LOGITS_NAME*: `output_start_logits` + - Workflow: + * `_pytorch` + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `pytorch` + - *CM_ML_MODEL_INPUT_IDS_NAME*: `input_ids` + - *CM_ML_MODEL_INPUT_MASK_NAME*: `input_mask` + - *CM_ML_MODEL_INPUT_SEGMENTS_NAME*: `segment_ids` + - *CM_ML_MODEL_OUTPUT_END_LOGITS_NAME*: `output_end_logits` + - *CM_ML_MODEL_OUTPUT_START_LOGITS_NAME*: `output_start_logits` + - Workflow: + * `_tf` + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `tf` + - *CM_ML_MODEL_INPUT_IDS_NAME*: `input_ids` + - *CM_ML_MODEL_INPUT_MASK_NAME*: `input_mask` + - *CM_ML_MODEL_INPUT_SEGMENTS_NAME*: `segment_ids` + - *CM_ML_MODEL_OUTPUT_END_LOGITS_NAME*: `output_end_logits` + - *CM_ML_MODEL_OUTPUT_START_LOGITS_NAME*: `output_start_logits` + - Workflow: + +
+ + + * Group "**packing**" +
+ Click here to expand this section. + + * `_packed` + - Environment variables: + - *CM_ML_MODEL_BERT_PACKED*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_torch + * CM names: `--adr.['torch', 'pytorch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.tensorflow + * CM names: `--adr.['tensorflow']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.transformers + * CM names: `--adr.['transformers']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.protobuf + * CM names: `--adr.['protobuf']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.onnx + * CM names: `--adr.['onnx']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnx-graphsurgeon + * CM names: `--adr.['onnx-graphsurgeon']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_numpy + * CM names: `--adr.['numpy']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,mlperf,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + 1. 
***Read "prehook_deps" on other CM scripts*** + * download,file,_wget,_url.https://zenodo.org/record/3733868/files/model.ckpt-5474.data-00000-of-00001 + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + * download,file,_wget,_url.https://zenodo.org/record/3733868/files/model.ckpt-5474.index + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + * download,file,_wget,_url.https://zenodo.org/record/3733868/files/model.ckpt-5474.meta + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + * download,file,_wget,_url.https://zenodo.org/record/3733868/files/vocab.txt + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + * download,file,_wget,_url.https://raw.githubusercontent.com/krai/axs2kilt/main/model_onnx_bert_large_packed_recipe/convert_model.py + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + * **`_unpacked`** (default) + - Environment variables: + - *CM_ML_MODEL_BERT_PACKED*: `no` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_ML_MODEL_PRECISION*: `fp32` + - Workflow: + * `_int8` + - Environment variables: + - *CM_ML_MODEL_PRECISION*: `int8` + - *CM_ML_MODEL_QUANTIZED*: `yes` + - Workflow: + +
+ + +#### Default variations + +`_fp32,_onnx,_unpacked` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-large-squad/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-large-squad/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-large-squad/_cm.json)*** + * download-and-extract + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_ML_MODEL_BERT_PACKED': ['yes']}` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + 1. ***Run native script if exists*** + * [run-packed.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-large-squad/run-packed.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-large-squad/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-large-squad/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-large-squad/_cm.json)*** + * get,dataset-aux,squad-vocab + - CM script: [get-dataset-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad-vocab) + +___ +### Script output +`cmr "get ml-model raw bert bert-large bert-squad language language-processing [,variations]" -j` +#### New environment keys (filter) + +* `CM_ML_MODEL*` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_BERT_LARGE_FP32_PATH` +* `CM_ML_MODEL_BERT_LARGE_INT8_PATH` +* `CM_ML_MODEL_BERT_PACKED_PATH` +* `CM_ML_MODEL_FILE` +* `CM_ML_MODEL_FILE_WITH_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-dlrm-terabyte.md b/docs/AI-ML-models/get-ml-model-dlrm-terabyte.md new file mode 100644 index 000000000..cc5c0328a --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-dlrm-terabyte.md @@ -0,0 +1,262 @@ +Automatically generated README for this automation recipe: **get-ml-model-dlrm-terabyte** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-dlrm-terabyte,8fa7582c603a4db3) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-dlrm-terabyte)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,dlrm,raw,terabyte,criteo-terabyte,criteo,recommendation* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ml-model dlrm raw terabyte criteo-terabyte criteo recommendation" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ml-model,dlrm,raw,terabyte,criteo-terabyte,criteo,recommendation` + +`cm run script --tags=get,ml-model,dlrm,raw,terabyte,criteo-terabyte,criteo,recommendation[,variations] [--input_flags]` + +*or* + +`cmr "get ml-model dlrm raw terabyte criteo-terabyte criteo recommendation"` + +`cmr "get ml-model dlrm raw terabyte criteo-terabyte criteo recommendation [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,ml-model,dlrm,raw,terabyte,criteo-terabyte,criteo,recommendation', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model,dlrm,raw,terabyte,criteo-terabyte,criteo,recommendation"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model dlrm raw terabyte criteo-terabyte criteo recommendation[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_debug` + - Environment variables: + - *CM_ML_MODEL_DEBUG*: `yes` + - Workflow: + * `_onnx,fp32` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `0.8025` + - *CM_PACKAGE_URL*: `https://dlrm.s3-us-west-1.amazonaws.com/models/tb00_40M.onnx.tar` + - *CM_UNTAR*: `yes` + - *CM_ML_MODEL_FILE*: `tb00_40M.onnx` + - *CM_ML_MODEL_DLRM_MAX_INDEX_RANGE*: `40000000` + - Workflow: + * `_onnx,fp32,debug` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `0.8107` + - *CM_PACKAGE_URL*: `https://dlrm.s3-us-west-1.amazonaws.com/models/tb0875_10M.onnx.tar` + - *CM_ML_MODEL_DLRM_MAX_INDEX_RANGE*: `10000000` + - *CM_UNTAR*: `yes` + - *CM_ML_MODEL_FILE*: `tb0875_10M.onnx` + - Workflow: + * `_pytorch,fp32` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `0.8025` + - *CM_PACKAGE_URL*: `https://dlrm.s3-us-west-1.amazonaws.com/models/tb00_40M.pt` + - *CM_ML_MODEL_DLRM_MAX_INDEX_RANGE*: `40000000` + - *CM_DOWNLOAD_CHECKSUM*: `2d49a5288cddb37c3c64860a06d79bb9` + - Workflow: + * `_pytorch,fp32,debug` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `0.8107` + - *CM_PACKAGE_URL*: `https://dlrm.s3-us-west-1.amazonaws.com/models/tb0875_10M.pt` + - *CM_ML_MODEL_DLRM_MAX_INDEX_RANGE*: `10000000` + - Workflow: + * `_pytorch,fp32,weight_sharded` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `0.8025` + - *CM_ML_MODEL_DLRM_MAX_INDEX_RANGE*: `40000000` + - *CM_ML_MODEL_FILE*: `model_weights` + - *CM_TMP_MODEL_ADDITIONAL_NAME*: `` + - *CM_DOWNLOAD_CHECKSUM*: `` + - Workflow: + * `_pytorch,fp32,weight_sharded,rclone` + - Environment variables: + - *CM_RCLONE_CONFIG_CMD*: `rclone config create mlc-inference s3 provider=Cloudflare access_key_id=f65ba5eef400db161ea49967de89f47b secret_access_key=fbea333914c292b854f14d3fe232bad6c5407bf0ab1bebf78833c2b359bdfd2b endpoint=https://c2686074cb2caf5cbaf6d134bdba8b47.r2.cloudflarestorage.com` + - *CM_PACKAGE_URL*: `mlc-inference:mlcommons-inference-wg-public/model_weights` + - Workflow: + * 
`_pytorch,fp32,weight_sharded,wget` + - Environment variables: + - *CM_PACKAGE_URL*: `https://cloud.mlcommons.org/index.php/s/XzfSeLgW8FYfR3S/download` + - *CM_DAE_EXTRACT_DOWNLOADED*: `yes` + - *CM_DOWNLOAD_FILENAME*: `download` + - *CM_EXTRACT_UNZIP*: `yes` + - Workflow: + +
+ + + * Group "**download-tool**" +
+ Click here to expand this section. + + * `_rclone` + - Workflow: + * `_wget` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * `_onnx` + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `onnx` + - Workflow: + * **`_pytorch`** (default) + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `pytorch` + - *CM_TMP_MODEL_ADDITIONAL_NAME*: `dlrm_terabyte.pytorch` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` + - *CM_ML_MODEL_PRECISION*: `fp32` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` + - Workflow: + +
+ + + * Group "**type**" +
+ Click here to expand this section. + + * **`_weight_sharded`** (default) + - Environment variables: + - *CM_DLRM_MULTIHOT_MODEL*: `yes` + - Workflow: + +
+ + +#### Default variations + +`_fp32,_pytorch,_weight_sharded` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--dir=value` → `CM_DOWNLOAD_PATH=value` +* `--download_path=value` → `CM_DOWNLOAD_PATH=value` +* `--to=value` → `CM_DOWNLOAD_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "dir":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-dlrm-terabyte/_cm.json) + 1. Run "preprocess" function from customize.py + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-dlrm-terabyte/_cm.json)*** + * download-and-extract + * CM names: `--adr.['dae']...` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-dlrm-terabyte/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-dlrm-terabyte/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-dlrm-terabyte/_cm.json) + +___ +### Script output +`cmr "get ml-model dlrm raw terabyte criteo-terabyte criteo recommendation [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-models/get-ml-model-efficientnet-lite.md b/docs/AI-ML-models/get-ml-model-efficientnet-lite.md new file mode 100644 index 000000000..c81976666 --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-efficientnet-lite.md @@ -0,0 +1,248 @@ +Automatically generated README for this automation recipe: **get-ml-model-efficientnet-lite** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-efficientnet-lite,1041f681977d4b7c) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-efficientnet-lite)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,efficientnet,raw,ml-model-efficientnet,ml-model-efficientnet-lite,lite,tflite,image-classification* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ml-model efficientnet raw ml-model-efficientnet ml-model-efficientnet-lite lite tflite image-classification" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ml-model,efficientnet,raw,ml-model-efficientnet,ml-model-efficientnet-lite,lite,tflite,image-classification` + +`cm run script --tags=get,ml-model,efficientnet,raw,ml-model-efficientnet,ml-model-efficientnet-lite,lite,tflite,image-classification[,variations] ` + +*or* + +`cmr "get ml-model efficientnet raw ml-model-efficientnet ml-model-efficientnet-lite lite tflite image-classification"` + +`cmr "get ml-model efficientnet raw ml-model-efficientnet ml-model-efficientnet-lite lite tflite image-classification [variations]" ` + + +* *See the list of 
`variations` [here](#variations) and check the [Getting Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +<details>
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,ml-model,efficientnet,raw,ml-model-efficientnet,ml-model-efficientnet-lite,lite,tflite,image-classification', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model,efficientnet,raw,ml-model-efficientnet,ml-model-efficientnet-lite,lite,tflite,image-classification"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model efficientnet raw ml-model-efficientnet ml-model-efficientnet-lite lite tflite image-classification[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_tflite` + - Workflow: + +
+ + + * Group "**kind**" +
+ Click here to expand this section. + + * **`_lite0`** (default) + - Environment variables: + - *CM_ML_MODEL_EFFICIENTNET_LITE_KIND*: `lite0` + - Workflow: + * `_lite1` + - Environment variables: + - *CM_ML_MODEL_EFFICIENTNET_LITE_KIND*: `lite1` + - Workflow: + * `_lite2` + - Environment variables: + - *CM_ML_MODEL_EFFICIENTNET_LITE_KIND*: `lite2` + - Workflow: + * `_lite3` + - Environment variables: + - *CM_ML_MODEL_EFFICIENTNET_LITE_KIND*: `lite3` + - Workflow: + * `_lite4` + - Environment variables: + - *CM_ML_MODEL_EFFICIENTNET_LITE_KIND*: `lite4` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_ML_MODEL_EFFICIENTNET_LITE_PRECISION*: `fp32` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_PRECISION*: `fp32` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` + - Workflow: + * `_uint8` + - Aliases: `_int8` + - Environment variables: + - *CM_ML_MODEL_EFFICIENTNET_LITE_PRECISION*: `int8` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `uint8` + - *CM_ML_MODEL_PRECISION*: `uint8` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `uint8` + - Workflow: + +
+ + + * Group "**resolution**" +
+ Click here to expand this section. + + * **`_resolution-224`** (default) + - Environment variables: + - *CM_ML_MODEL_IMAGE_HEIGHT*: `224` + - *CM_ML_MODEL_IMAGE_WIDTH*: `224` + - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `224` + - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.224` + - Workflow: + * `_resolution-240` + - Environment variables: + - *CM_ML_MODEL_IMAGE_HEIGHT*: `240` + - *CM_ML_MODEL_IMAGE_WIDTH*: `240` + - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `240` + - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.240` + - Workflow: + * `_resolution-260` + - Environment variables: + - *CM_ML_MODEL_IMAGE_HEIGHT*: `260` + - *CM_ML_MODEL_IMAGE_WIDTH*: `260` + - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `260` + - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.260` + - Workflow: + * `_resolution-280` + - Environment variables: + - *CM_ML_MODEL_IMAGE_HEIGHT*: `280` + - *CM_ML_MODEL_IMAGE_WIDTH*: `280` + - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `280` + - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.280` + - Workflow: + * `_resolution-300` + - Environment variables: + - *CM_ML_MODEL_IMAGE_HEIGHT*: `300` + - *CM_ML_MODEL_IMAGE_WIDTH*: `300` + - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `300` + - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.300` + - Workflow: + +
+ + +#### Default variations + +`_fp32,_lite0,_resolution-224` + +#### Valid variation combinations checked by the community + + + +* `_lite0,_resolution-224` +* `_lite1,_resolution-240` +* `_lite2,_resolution-260` +* `_lite3,_resolution-280` +* `_lite4,_resolution-300` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_ML_MODEL_INPUTS_DATA_TYPE: `fp32` +* CM_ML_MODEL_PRECISION: `fp32` +* CM_ML_MODEL_WEIGHTS_DATA_TYPE: `fp32` + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-efficientnet-lite/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-efficientnet-lite/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-efficientnet-lite/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-efficientnet-lite/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-efficientnet-lite/_cm.json) + +___ +### Script output +`cmr "get ml-model efficientnet raw ml-model-efficientnet ml-model-efficientnet-lite lite tflite image-classification [,variations]" -j` +#### New environment keys (filter) + +* `CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS` +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_FILE` +* `CM_ML_MODEL_FILE_WITH_PATH` +* `CM_ML_MODEL_PATH` +* `CM_ML_MODEL_STARTING_WEIGHTS_FILENAME` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-gptj.md b/docs/AI-ML-models/get-ml-model-gptj.md new file mode 100644 index 000000000..5231048a7 --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-gptj.md @@ -0,0 +1,321 @@ +Automatically generated README for this automation recipe: **get-ml-model-gptj** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-gptj,a41166210f294fbf) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,raw,ml-model,gptj,gpt-j,large-language-model* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get raw ml-model gptj gpt-j large-language-model" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,raw,ml-model,gptj,gpt-j,large-language-model` + +`cm run script --tags=get,raw,ml-model,gptj,gpt-j,large-language-model[,variations] [--input_flags]` + +*or* + +`cmr "get raw ml-model gptj gpt-j large-language-model"` + +`cmr "get raw ml-model gptj gpt-j large-language-model [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,raw,ml-model,gptj,gpt-j,large-language-model' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,raw,ml-model,gptj,gpt-j,large-language-model"``` + +#### Run this script via Docker (beta) + +`cm docker script "get raw ml-model gptj gpt-j large-language-model[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_batch_size.#` + - Environment variables: + - *CM_ML_MODEL_BATCH_SIZE*: `#` + - Workflow: + * `_pytorch,fp32` + - Environment variables: + - *CM_DOWNLOAD_EXTRA_OPTIONS*: ` --output-document checkpoint.zip` + - *CM_UNZIP*: `yes` + - *CM_DOWNLOAD_CHECKSUM_NOT_USED*: `e677e28aaf03da84584bb3073b7ee315` + - *CM_PACKAGE_URL*: `https://cloud.mlcommons.org/index.php/s/QAZ2oM94MkFtbQx/download` + - *CM_RCLONE_CONFIG_CMD*: `rclone config create mlc-inference s3 provider=Cloudflare access_key_id=f65ba5eef400db161ea49967de89f47b secret_access_key=fbea333914c292b854f14d3fe232bad6c5407bf0ab1bebf78833c2b359bdfd2b endpoint=https://c2686074cb2caf5cbaf6d134bdba8b47.r2.cloudflarestorage.com` + - *CM_RCLONE_URL*: `mlc-inference:mlcommons-inference-wg-public/gpt-j` + - Workflow: + * `_pytorch,fp32,wget` + - Workflow: + * `_pytorch,int4,intel` + - Workflow: + * `_pytorch,int8,intel` + - Workflow: + * `_pytorch,intel` + - Environment variables: + - *CM_GPTJ_INTEL_MODEL*: `yes` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,mlperf,inference,results + - CM script: [get-mlperf-inference-results](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results) + - CM script: [get-mlperf-inference-results-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results-dir) + * get,ml-model,gpt-j,_fp32,_pytorch + - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) + * get,conda,_name.gptj-pt + - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) + * get,python,_conda.gptj-pt + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic,conda-package,_package.intel-openmp,_source.intel + * CM names: `--adr.['conda-package', 'intel-openmp']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.jemalloc,_source.conda-forge + * CM names: `--adr.['conda-package', 'jemalloc']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * install,ipex,from.src,_for-intel-mlperf-inference-v3.1-gptj + - CM script: [install-ipex-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-ipex-from-src) + * get,dataset,cnndm,_calibration + - CM script: [get-dataset-cnndm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-cnndm) + * `_saxml,fp32` + - Environment variables: + - *CM_TMP_MODEL_SAXML*: `fp32` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,ml-model,gptj,_pytorch,_fp32 + - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_package.jax[cpu] + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.paxml + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.praxis + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.transformers + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.accelerate + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_saxml,int8` + - Environment variables: + - *CM_TMP_MODEL_SAXML*: `int8` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,ml-model,gptj,_saxml,_fp32 + - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_package.praxis + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.apache-beam + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,git,repo,_repo.https://github.com/google/saxml + * CM names: `--adr.['saxml']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + +
+ + + * Group "**download-tool**" +
+ Click here to expand this section. + + * **`_rclone`** (default) + - Environment variables: + - *CM_DOWNLOAD_FILENAME*: `checkpoint` + - *CM_DOWNLOAD_URL*: `<<>>` + - Workflow: + * `_wget` + - Environment variables: + - *CM_DOWNLOAD_URL*: `<<>>` + - *CM_DOWNLOAD_FILENAME*: `checkpoint.zip` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * **`_pytorch`** (default) + - Environment variables: + - *CM_ML_MODEL_DATA_LAYOUT*: `NCHW` + - *CM_ML_MODEL_FRAMEWORK*: `pytorch` + - *CM_ML_STARTING_WEIGHTS_FILENAME*: `<<>>` + - Workflow: + * `_saxml` + - Workflow: + +
+ + + * Group "**model-provider**" +
+ Click here to expand this section. + + * `_intel` + - Workflow: + * **`_mlcommons`** (default) + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * `_fp32` + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` + - *CM_ML_MODEL_PRECISION*: `fp32` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` + - Workflow: + * `_int4` + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `int4` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `int4` + - Workflow: + * `_int8` + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `int8` + - *CM_ML_MODEL_PRECISION*: `int8` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `int8` + - Workflow: + * `_uint8` + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `uint8` + - *CM_ML_MODEL_PRECISION*: `uint8` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `uint8` + - Workflow: + +
+ + +#### Default variations + +`_mlcommons,_pytorch,_rclone` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--checkpoint=value` → `GPTJ_CHECKPOINT_PATH=value` +* `--download_path=value` → `CM_DOWNLOAD_PATH=value` +* `--to=value` → `CM_DOWNLOAD_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "checkpoint":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/_cm.json)*** + * download-and-extract + * Enable this dependency only if all ENV vars are set:
+`{'CM_TMP_REQUIRE_DOWNLOAD': ['yes']}` + * CM names: `--adr.['dae']...` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + 1. ***Run native script if exists*** + * [run-int4-calibration.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/run-int4-calibration.sh) + * [run-intel.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/run-intel.sh) + * [run-saxml-quantized.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/run-saxml-quantized.sh) + * [run-saxml.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/run-saxml.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/_cm.json) + +___ +### Script output +`cmr "get raw ml-model gptj gpt-j large-language-model [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_ML_MODEL_*` +* `GPTJ_CHECKPOINT_PATH` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_FILE` +* `CM_ML_MODEL_FILE_WITH_PATH` +* `CM_ML_MODEL_WEIGHT_DATA_TYPES` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-huggingface-zoo.md b/docs/AI-ML-models/get-ml-model-huggingface-zoo.md new file mode 100644 index 000000000..7e5d18f56 --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-huggingface-zoo.md @@ -0,0 +1,192 @@ +Automatically generated README for this automation recipe: **get-ml-model-huggingface-zoo** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and 
Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-huggingface-zoo,53cf8252a443446a) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,huggingface,zoo* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ml-model huggingface zoo" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ml-model,huggingface,zoo` + +`cm run script --tags=get,ml-model,huggingface,zoo[,variations] [--input_flags]` + +*or* + +`cmr "get ml-model huggingface zoo"` + +`cmr "get ml-model huggingface zoo [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,ml-model,huggingface,zoo' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model,huggingface,zoo"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model huggingface zoo[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_model-stub.#` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `#` + - Workflow: + * `_onnx-subfolder` + - Environment variables: + - *CM_HF_SUBFOLDER*: `onnx` + - Workflow: + * `_pierreguillou_bert_base_cased_squad_v1.1_portuguese` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `pierreguillou/bert-base-cased-squad-v1.1-portuguese` + - Workflow: + * `_prune` + - Environment variables: + - *CM_MODEL_TASK*: `prune` + - Workflow: + +
+ + + * Group "**download-type**" +
+ Click here to expand this section. + + * `_clone-repo` + - Environment variables: + - *CM_GIT_CLONE_REPO*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,git,repo,_lfs + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--download_path=value` → `CM_DOWNLOAD_PATH=value` +* `--env_key=value` → `CM_MODEL_ZOO_ENV_KEY=value` +* `--full_subfolder=value` → `CM_HF_FULL_SUBFOLDER=value` +* `--model_filename=value` → `CM_MODEL_ZOO_FILENAME=value` +* `--revision=value` → `CM_HF_REVISION=value` +* `--subfolder=value` → `CM_HF_SUBFOLDER=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "download_path":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo/_cm.json)*** + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_huggingface_hub + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo/_cm.json) + +___ +### Script output +`cmr "get ml-model huggingface zoo [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_ML_MODEL*` +* `CM_MODEL_ZOO_STUB` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_'+env_key+'_FILE_WITH_PATH` +* `CM_ML_MODEL_'+env_key+'_PATH` +* `CM_ML_MODEL_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-llama2.md b/docs/AI-ML-models/get-ml-model-llama2.md new file mode 100644 index 000000000..75957bee9 --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-llama2.md @@ -0,0 +1,222 @@ +Automatically generated README for this automation recipe: **get-ml-model-llama2** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-llama2,5db97be9f61244c6) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-llama2)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,raw,ml-model,language-processing,llama2,llama2-70b,text-summarization* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get raw ml-model language-processing llama2 llama2-70b text-summarization" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,raw,ml-model,language-processing,llama2,llama2-70b,text-summarization` + +`cm run script --tags=get,raw,ml-model,language-processing,llama2,llama2-70b,text-summarization[,variations] [--input_flags]` + +*or* + +`cmr "get raw ml-model language-processing llama2 llama2-70b text-summarization"` + +`cmr "get raw ml-model language-processing llama2 llama2-70b text-summarization [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,raw,ml-model,language-processing,llama2,llama2-70b,text-summarization' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,raw,ml-model,language-processing,llama2,llama2-70b,text-summarization"``` + +#### Run this script via Docker (beta) + +`cm docker script "get raw ml-model language-processing llama2 llama2-70b text-summarization[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_batch_size.#` + - Environment variables: + - *CM_ML_MODEL_BATCH_SIZE*: `#` + - Workflow: + * `_pytorch,fp32` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * **`_pytorch`** (default) + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `pytorch` + - Workflow: + +
+ + + * Group "**huggingface-stub**" +
+ Click here to expand this section. + + * **`_meta-llama/Llama-2-70b-chat-hf`** (default) + - Environment variables: + - *CM_GIT_CHECKOUT_FOLDER*: `Llama-2-70b-chat-hf` + - *CM_MODEL_ZOO_ENV_KEY*: `LLAMA2` + - Workflow: + * `_meta-llama/Llama-2-7b-chat-hf` + - Environment variables: + - *CM_GIT_CHECKOUT_FOLDER*: `Llama-2-7b-chat-hf` + - *CM_MODEL_ZOO_ENV_KEY*: `LLAMA2` + - Workflow: + * `_stub.#` + - Environment variables: + - *CM_MODEL_ZOO_ENV_KEY*: `LLAMA2` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` + - *CM_ML_MODEL_PRECISION*: `fp32` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` + - Workflow: + * `_int8` + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `int8` + - *CM_ML_MODEL_PRECISION*: `int8` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `int8` + - Workflow: + * `_uint8` + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `uint8` + - *CM_ML_MODEL_PRECISION*: `uint8` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `uint8` + - Workflow: + +
+ + +#### Default variations + +`_fp32,_meta-llama/Llama-2-70b-chat-hf,_pytorch` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--checkpoint=value` → `LLAMA2_CHECKPOINT_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "checkpoint":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-llama2/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-llama2/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-llama2/_cm.json)*** + * get,ml-model,huggingface,zoo,_clone-repo + * Enable this dependency only if all ENV vars are set:
+`{'CM_TMP_REQUIRE_DOWNLOAD': ['yes']}` + * CM names: `--adr.['hf-zoo']...` + - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-llama2/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-llama2/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-llama2/_cm.json) + +___ +### Script output +`cmr "get raw ml-model language-processing llama2 llama2-70b text-summarization [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_ML_MODEL_*` +* `LLAMA2_CHECKPOINT_PATH` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-mobilenet.md b/docs/AI-ML-models/get-ml-model-mobilenet.md new file mode 100644 index 000000000..94f71e697 --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-mobilenet.md @@ -0,0 +1,470 @@ +Automatically generated README for this automation recipe: **get-ml-model-mobilenet** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-mobilenet,ce46675a3ab249e4) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-mobilenet)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,mobilenet,raw,ml-model-mobilenet,image-classification* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ml-model mobilenet raw ml-model-mobilenet image-classification" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ml-model,mobilenet,raw,ml-model-mobilenet,image-classification` + +`cm run script --tags=get,ml-model,mobilenet,raw,ml-model-mobilenet,image-classification[,variations] ` + +*or* + +`cmr "get ml-model mobilenet raw ml-model-mobilenet image-classification"` + +`cmr "get ml-model mobilenet raw ml-model-mobilenet image-classification [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,ml-model,mobilenet,raw,ml-model-mobilenet,image-classification' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model,mobilenet,raw,ml-model-mobilenet,image-classification"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model mobilenet raw ml-model-mobilenet image-classification[variations]" ` + +___ +### Customization + + +#### Variations + + * *Internal group (variations should not be selected manually)* +
+ Click here to expand this section. + + * `_quantized_` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_NAME_SUFFIX*: `_quant` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `yes` + - Workflow: + * `_tf,from.google,v2,quantized_` + - Environment variables: + - *CM_PACKAGE_URL*: `https://storage.googleapis.com/mobilenet_v2/checkpoints/<<>>_v2_<<>>_<<>>.tgz` + - *CM_ML_MODEL_WEIGHTS_FILE*: `<<>>_v2_<<>>_<<>>.ckpt.data-00000-of-00001` + - *CM_ML_MODEL_FILE*: `model.tflite` + - *CM_EXTRACT_FOLDER*: `v2_<<>>_<<>>` + - *CM_UNTAR*: `yes` + - Workflow: + +
+ + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_onnx,fp32,v1` + - Environment variables: + - *CM_ML_MODEL_NORMALIZE_DATA*: `yes` + - *CM_ML_MODEL_SUBTRACT_MEANS*: `no` + - *CM_ML_MODEL_VER*: `1_1.0_224` + - *CM_ML_MODEL_INPUT_LAYER_NAME*: `input:0` + - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `MobilenetV1/Predictions/Reshape_1:0` + - Workflow: + * `_onnx,int8,v1` + - Environment variables: + - *CM_ML_MODEL_NORMALIZE_DATA*: `no` + - *CM_ML_MODEL_SUBTRACT_MEANS*: `yes` + - *CM_ML_MODEL_GIVEN_CHANNEL_MEANS*: `128.0 128.0 128.0` + - *CM_ML_MODEL_VER*: `1_1.0_224_quant` + - *CM_ML_MODEL_INPUT_LAYER_NAME*: `0` + - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `169` + - *CM_PACKAGE_URL*: `https://zenodo.org/record/3353417/files/Quantized%20MobileNet.zip` + - *CM_ML_MODEL_FILE*: `mobilenet_sym_no_bn.onnx` + - *CM_UNZIP*: `yes` + - Workflow: + * `_onnx,opset-11,fp32,v1` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/4735651/files/mobilenet_v1_1.0_224.onnx` + - Workflow: + * `_onnx,opset-8,fp32,v1` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/3157894/files/mobilenet_v1_1.0_224.onnx` + - Workflow: + * `_tf,fp32,v1,resolution-224,multiplier-1.0` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `71.676` + - Workflow: + * `_tf,from.google,v1` + - Environment variables: + - *CM_PACKAGE_URL*: `http://download.tensorflow.org/models/mobilenet_v1_2018_08_02/mobilenet_v1_<<>>_<<>><<>>.tgz` + - *CM_UNTAR*: `yes` + - Workflow: + * `_tf,from.google,v2,fp32` + - Environment variables: + - *CM_PACKAGE_URL*: `https://storage.googleapis.com/mobilenet_v2/checkpoints/mobilenet_v2_<<>>_<<>>.tgz` + - *CM_ML_MODEL_WEIGHTS_FILE*: `mobilenet_v2_<<>>_<<>>.ckpt.data-00000-of-00001` + - *CM_ML_MODEL_FILE*: `mobilenet_v2_<<>>_<<>>.tflite` + - *CM_UNTAR*: `yes` + - Workflow: + * `_tf,from.google,v3` + - Environment variables: + - *CM_PACKAGE_URL*: `https://storage.googleapis.com/mobilenet_v3/checkpoints/v3-<<>>_<<>>_<<>>_<<>>.tgz` + - *CM_EXTRACT_FOLDER*: 
`v3-<<>>_<<>>_<<>>_<<>>` + - *CM_ML_MODEL_FILE*: `v3-<<>>_<<>>_<<>>_<<>>.tflite` + - *CM_UNTAR*: `yes` + - Workflow: + * `_tf,from.zenodo,v1` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/2269307/files/mobilenet_v1_<<>>_<<>><<>>.tgz` + - *CM_UNTAR*: `yes` + - Workflow: + * `_tf,int8,v1,resolution-224,multiplier-1.0` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `70.762` + - Workflow: + * `_tf,v1` + - Environment variables: + - *CM_ML_MODEL_VER*: `1_<<>>_<<>><<>>_2018_08_02` + - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `MobilenetV1/Predictions/Reshape_1` + - *CM_ML_MODEL_WEIGHTS_FILE*: `mobilenet_v1_<<>>_<<>><<>>.ckpt.data-00000-of-00001` + - *CM_ML_MODEL_FILE*: `mobilenet_v1_<<>>_<<>><<>>.tflite` + - Workflow: + * `_tf,v1,fp32` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_NAME_PREFIX*: `` + - Workflow: + * `_tf,v1,int8` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_NAME_SUFFIX*: `_quant` + - Workflow: + * `_tf,v1,uint8` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_NAME_SUFFIX*: `_quant` + - Workflow: + * `_tf,v2,fp32` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_NAME_PREFIX*: `` + - *CM_ML_MODEL_VER*: `2_<<>>_<<>>` + - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `MobilenetV2/Predictions/Reshape_1` + - Workflow: + * `_tf,v2,int8` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_NAME_PREFIX*: `quantized` + - *CM_ML_MODEL_VER*: `2_<<>>_<<>>` + - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `MobilenetV2/Predictions/Softmax` + - Workflow: + * `_tf,v2,uint8` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_NAME_PREFIX*: `quantized` + - *CM_ML_MODEL_VER*: `2_<<>>_<<>>` + - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `MobilenetV2/Predictions/Softmax` + - Workflow: + * `_tf,v3` + - Environment variables: + - *CM_ML_MODEL_VER*: `3_<<>>_<<>>` + - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `MobilenetV3/Predictions/Softmax` + - Workflow: + * `_tflite` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * `_onnx` + - Environment variables: + - *CM_ML_MODEL_DATA_LAYOUT*: `NCHW` + - *CM_ML_MODEL_FRAMEWORK*: `onnx` + - Workflow: + * **`_tf`** (default) + - Environment variables: + - *CM_ML_MODEL_DATA_LAYOUT*: `NHWC` + - *CM_ML_MODEL_NORMALIZE_DATA*: `yes` + - *CM_ML_MODEL_SUBTRACT_MEANS*: `no` + - *CM_ML_MODEL_INPUT_LAYER_NAME*: `input` + - Workflow: + +
+ + + * Group "**kind**" +
+ Click here to expand this section. + + * `_large` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_KIND*: `large` + - Workflow: + * `_large-minimalistic` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_KIND*: `large-minimalistic` + - Workflow: + * `_small` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_KIND*: `small` + - Workflow: + * `_small-minimalistic` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_KIND*: `small-minimalistic` + - Workflow: + +
+ + + * Group "**multiplier**" +
+ Click here to expand this section. + + * `_multiplier-0.25` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_MULTIPLIER*: `0.25` + - *CM_ML_MODEL_MOBILENET_MULTIPLIER_PERCENTAGE*: `25` + - Workflow: + * `_multiplier-0.35` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_MULTIPLIER*: `0.35` + - *CM_ML_MODEL_MOBILENET_MULTIPLIER_PERCENTAGE*: `35` + - Workflow: + * `_multiplier-0.5` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_MULTIPLIER*: `0.5` + - *CM_ML_MODEL_MOBILENET_MULTIPLIER_PERCENTAGE*: `50` + - Workflow: + * `_multiplier-0.75` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_MULTIPLIER*: `0.75` + - *CM_ML_MODEL_MOBILENET_MULTIPLIER_PERCENTAGE*: `75` + - Workflow: + * `_multiplier-1.0` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_MULTIPLIER*: `1.0` + - *CM_ML_MODEL_MOBILENET_MULTIPLIER_PERCENTAGE*: `100` + - Workflow: + +
+ + + * Group "**opset-version**" +
+ Click here to expand this section. + + * `_opset-11` + - Environment variables: + - *CM_ML_MODEL_ONNX_OPSET*: `11` + - Workflow: + * `_opset-8` + - Environment variables: + - *CM_ML_MODEL_ONNX_OPSET*: `8` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_PRECISION*: `fp32` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_MOBILENET_PRECISION*: `float` + - Workflow: + * `_int8` + - Environment variables: + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_PRECISION*: `int8` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_MOBILENET_PRECISION*: `int8` + - Workflow: + * `_uint8` + - Environment variables: + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `uint8` + - *CM_ML_MODEL_PRECISION*: `uint8` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `uint8` + - *CM_ML_MODEL_MOBILENET_PRECISION*: `uint8` + - Workflow: + +
+ + + * Group "**resolution**" +
+ Click here to expand this section. + + * `_resolution-128` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `128` + - *CM_ML_MODEL_IMAGE_HEIGHT*: `128` + - *CM_ML_MODEL_IMAGE_WIDTH*: `128` + - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.128` + - Workflow: + * `_resolution-160` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `160` + - *CM_ML_MODEL_IMAGE_HEIGHT*: `160` + - *CM_ML_MODEL_IMAGE_WIDTH*: `160` + - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.160` + - Workflow: + * `_resolution-192` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `192` + - *CM_ML_MODEL_IMAGE_HEIGHT*: `192` + - *CM_ML_MODEL_IMAGE_WIDTH*: `192` + - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.192` + - Workflow: + * `_resolution-224` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `224` + - *CM_ML_MODEL_IMAGE_HEIGHT*: `224` + - *CM_ML_MODEL_IMAGE_WIDTH*: `224` + - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.224` + - Workflow: + +
+ + + * Group "**source**" +
+ Click here to expand this section. + + * `_from.google` + - Environment variables: + - *CM_DOWNLOAD_SOURCE*: `google` + - Workflow: + * `_from.zenodo` + - Environment variables: + - *CM_DOWNLOAD_SOURCE*: `zenodo` + - Workflow: + +
+ + + * Group "**version**" +
+ Click here to expand this section. + + * `_v1` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_VERSION*: `1` + - *CM_ML_MODEL_FULL_NAME*: `mobilenet-v1-precision_<<>>-<<>>-<<>>` + - Workflow: + * `_v2` + - Environment variables: + - *CM_ML_MODEL_MOBILENET_VERSION*: `2` + - *CM_ML_MODEL_VER*: `2` + - *CM_ML_MODEL_FULL_NAME*: `mobilenet-v2-precision_<<>>-<<>>-<<>>` + - Workflow: + * **`_v3`** (default) + - Environment variables: + - *CM_ML_MODEL_MOBILENET_VERSION*: `3` + - *CM_ML_MODEL_VER*: `3` + - *CM_ML_MODEL_FULL_NAME*: `mobilenet-v3-precision_<<>>-<<>>-<<>>` + - Workflow: + +
+ + +#### Default variations + +`_fp32,_tf,_v3` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_ML_MODEL: `mobilenet` +* CM_ML_MODEL_DATASET: `imagenet2012-val` +* CM_ML_MODEL_RETRAINING: `no` +* CM_ML_MODEL_WEIGHT_TRANSFORMATIONS: `no` +* CM_ML_MODEL_INPUTS_DATA_TYPE: `fp32` +* CM_ML_MODEL_WEIGHTS_DATA_TYPE: `fp32` +* CM_ML_MODEL_MOBILENET_NAME_SUFFIX: `` + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-mobilenet/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-mobilenet/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-mobilenet/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-mobilenet/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-mobilenet/_cm.json) + +___ +### Script output +`cmr "get ml-model mobilenet raw ml-model-mobilenet image-classification [,variations]" -j` +#### New environment keys (filter) + +* `CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS` +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_FILE` +* `CM_ML_MODEL_FILE_WITH_PATH` +* `CM_ML_MODEL_PATH` +* `CM_ML_MODEL_STARTING_WEIGHTS_FILENAME` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-neuralmagic-zoo.md b/docs/AI-ML-models/get-ml-model-neuralmagic-zoo.md new file mode 100644 index 000000000..90b5c4731 --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-neuralmagic-zoo.md @@ -0,0 +1,335 @@ +Automatically generated README for this automation recipe: **get-ml-model-neuralmagic-zoo** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-neuralmagic-zoo,adbb3f2525a14f97) ]* + +--- 
+#### Summary
+
+* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)*
+* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo)*
+* CM meta description for this script: *[_cm.json](_cm.json)*
+* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,model,zoo,deepsparse,model-zoo,sparse-zoo,neuralmagic,neural-magic*
+* Output cached? *True*
+* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts
+
+
+---
+### Reuse this script in your project
+
+#### Install MLCommons CM automation meta-framework
+
+* [Install CM](https://access.cknowledge.org/playground/?action=install)
+* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md)
+
+#### Pull CM repository with this automation recipe (CM script)
+
+```cm pull repo mlcommons@cm4mlops```
+
+#### Print CM help from the command line
+
+````cmr "get ml-model model zoo deepsparse model-zoo sparse-zoo neuralmagic neural-magic" --help````
+
+#### Customize and run this script from the command line with different variations and flags
+
+`cm run script --tags=get,ml-model,model,zoo,deepsparse,model-zoo,sparse-zoo,neuralmagic,neural-magic`
+
+`cm run script --tags=get,ml-model,model,zoo,deepsparse,model-zoo,sparse-zoo,neuralmagic,neural-magic[,variations] `
+
+*or*
+
+`cmr "get ml-model model zoo deepsparse model-zoo sparse-zoo neuralmagic neural-magic"`
+
+`cmr "get ml-model model zoo deepsparse model-zoo sparse-zoo neuralmagic neural-magic [variations]" `
+
+
+* *See the list of `variations` [here](#variations) and check the [Getting Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.*
+
+#### Run this script from Python
+
+
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,ml-model,model,zoo,deepsparse,model-zoo,sparse-zoo,neuralmagic,neural-magic',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model,model,zoo,deepsparse,model-zoo,sparse-zoo,neuralmagic,neural-magic"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model model zoo deepsparse model-zoo sparse-zoo neuralmagic neural-magic[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_bert-base-pruned90-none` + - Aliases: `_model-stub.zoo:nlp/question_answering/bert-base/pytorch/huggingface/squad/pruned90-none` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/bert-base/pytorch/huggingface/squad/pruned90-none` + - *CM_ML_MODEL_FULL_NAME*: `bert-base-pruned90-none-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-base-uncased` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_RETRAINING*: `no` + - Workflow: + * `_bert-base-pruned95_obs_quant-none` + - Aliases: `_model-stub.zoo:nlp/question_answering/bert-base/pytorch/huggingface/squad/pruned95_obs_quant-none` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/bert-base/pytorch/huggingface/squad/pruned95_obs_quant-none` + - *CM_ML_MODEL_FULL_NAME*: `bert-base-pruned95_obs_quant-none-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-base-uncased` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int64` + - *CM_ML_MODEL_RETRAINING*: `yes` + - Workflow: + * `_bert-base_cased-pruned90-none` + - Aliases: `_model-stub.zoo:nlp/question_answering/bert-base_cased/pytorch/huggingface/squad/pruned90-none` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/bert-base_cased/pytorch/huggingface/squad/pruned90-none` + - *CM_ML_MODEL_FULL_NAME*: `bert-base_cased-pruned90-none-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-base-cased` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_RETRAINING*: `no` + - Workflow: + * `_bert-large-base-none` 
+ - Aliases: `_model-stub.zoo:nlp/question_answering/bert-large/pytorch/huggingface/squad/base-none` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/bert-large/pytorch/huggingface/squad/base-none` + - *CM_ML_MODEL_FULL_NAME*: `bert-large-base-none-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-large-uncased` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_RETRAINING*: `no` + - Workflow: + * `_bert-large-pruned80_quant-none-vnni` + - Aliases: `_model-stub.zoo:nlp/question_answering/bert-large/pytorch/huggingface/squad/pruned80_quant-none-vnni` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/bert-large/pytorch/huggingface/squad/pruned80_quant-none-vnni` + - *CM_ML_MODEL_FULL_NAME*: `bert-large-pruned80_quant-none-vnni-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-large-uncased` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int64` + - *CM_ML_MODEL_RETRAINING*: `no` + - Workflow: + * `_mobilebert-14layer_pruned50-none-vnni` + - Aliases: `_model-stub.zoo:nlp/question_answering/mobilebert-none/pytorch/huggingface/squad/14layer_pruned50-none-vnni` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/mobilebert-none/pytorch/huggingface/squad/14layer_pruned50-none-vnni` + - *CM_ML_MODEL_FULL_NAME*: `mobilebert-14layer_pruned50-none-vnni-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://storage.googleapis.com/cloud-tpu-checkpoints/mobilebert/uncased_L-24_H-128_B-512_A-4_F-4_OPT.tar.gz` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_RETRAINING*: `no` + 
- Workflow: + * `_mobilebert-14layer_pruned50_quant-none-vnni` + - Aliases: `_model-stub.zoo:nlp/question_answering/mobilebert-none/pytorch/huggingface/squad/14layer_pruned50_quant-none-vnni` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/mobilebert-none/pytorch/huggingface/squad/14layer_pruned50_quant-none-vnni` + - *CM_ML_MODEL_FULL_NAME*: `mobilebert-14layer_pruned50_quant-none-vnni-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://storage.googleapis.com/cloud-tpu-checkpoints/mobilebert/uncased_L-24_H-128_B-512_A-4_F-4_OPT.tar.gz` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int64` + - *CM_ML_MODEL_RETRAINING*: `yes` + - Workflow: + * `_mobilebert-base_quant-none` + - Aliases: `_model-stub.zoo:nlp/question_answering/mobilebert-none/pytorch/huggingface/squad/base_quant-none` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/mobilebert-none/pytorch/huggingface/squad/base_quant-none` + - *CM_ML_MODEL_FULL_NAME*: `mobilebert-base_quant-none-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://storage.googleapis.com/cloud-tpu-checkpoints/mobilebert/uncased_L-24_H-128_B-512_A-4_F-4_OPT.tar.gz` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int64` + - *CM_ML_MODEL_RETRAINING*: `yes` + - Workflow: + * `_mobilebert-none-base-none` + - Aliases: `_model-stub.zoo:nlp/question_answering/mobilebert-none/pytorch/huggingface/squad/base-none` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/mobilebert-none/pytorch/huggingface/squad/base-none` + - *CM_ML_MODEL_FULL_NAME*: `mobilebert-none-base-none-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: 
`https://storage.googleapis.com/cloud-tpu-checkpoints/mobilebert/uncased_L-24_H-128_B-512_A-4_F-4_OPT.tar.gz` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_RETRAINING*: `no` + - Workflow: + * `_model-stub.#` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `#` + - Workflow: + * `_obert-base-pruned90-none` + - Aliases: `_model-stub.zoo:nlp/question_answering/obert-base/pytorch/huggingface/squad/pruned90-none` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/obert-base/pytorch/huggingface/squad/pruned90-none` + - *CM_ML_MODEL_FULL_NAME*: `obert-base-pruned90-none-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-large-uncased` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_RETRAINING*: `no` + - Workflow: + * `_obert-large-base-none` + - Aliases: `_model-stub.zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/base-none` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/base-none` + - *CM_ML_MODEL_FULL_NAME*: `obert-large-base-none-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-large-uncased` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_RETRAINING*: `no` + - Workflow: + * `_obert-large-pruned95-none-vnni` + - Aliases: `_model-stub.zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/pruned95-none-vnni` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/pruned95-none-vnni` + - *CM_ML_MODEL_FULL_NAME*: `obert-large-pruned95-none-vnni-bert-99` + - 
*CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-large-uncased` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_RETRAINING*: `no` + - Workflow: + * `_obert-large-pruned95_quant-none-vnni` + - Aliases: `_model-stub.zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/pruned95_quant-none-vnni` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/pruned95_quant-none-vnni` + - *CM_ML_MODEL_FULL_NAME*: `obert-large-pruned95_quant-none-vnni-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-large-uncased` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int64` + - *CM_ML_MODEL_RETRAINING*: `yes` + - Workflow: + * `_obert-large-pruned97-none` + - Aliases: `_model-stub.zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/pruned97-none` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/pruned97-none` + - *CM_ML_MODEL_FULL_NAME*: `obert-large-pruned97-none-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-large-uncased` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_RETRAINING*: `no` + - Workflow: + * `_obert-large-pruned97-quant-none` + - Aliases: `_model-stub.zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/pruned97_quant-none` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/pruned97_quant-none` + - *CM_ML_MODEL_FULL_NAME*: `obert-large-pruned97-quant-none-bert-99` + - 
*CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-large-uncased` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int64` + - *CM_ML_MODEL_RETRAINING*: `no` + - Workflow: + * `_oberta-base-pruned90-quant-none` + - Aliases: `_model-stub.zoo:nlp/question_answering/oberta-base/pytorch/huggingface/squad/pruned90_quant-none` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/oberta-base/pytorch/huggingface/squad/pruned90_quant-none` + - *CM_ML_MODEL_FULL_NAME*: `oberta-base-pruned90-quant-none-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/roberta-base` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int64` + - *CM_ML_MODEL_RETRAINING*: `no` + - Workflow: + * `_roberta-base-pruned85-quant-none` + - Aliases: `_model-stub.zoo:nlp/question_answering/roberta-base/pytorch/huggingface/squad/pruned85_quant-none` + - Environment variables: + - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/roberta-base/pytorch/huggingface/squad/pruned85_quant-none` + - *CM_ML_MODEL_FULL_NAME*: `roberta-base-pruned85-quant-none-bert-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/roberta-base` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, unstructured pruning` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int64` + - *CM_ML_MODEL_RETRAINING*: `no` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo/_cm.json)*** + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_package.protobuf + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_sparsezoo + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo/_cm.json) + +___ +### Script output +`cmr "get ml-model model zoo deepsparse model-zoo sparse-zoo neuralmagic neural-magic [,variations]" -j` +#### New environment keys (filter) + +* `CM_GET_DEPENDENT_CACHED_PATH` +* `CM_MLPERF_CUSTOM_MODEL_PATH` +* `CM_ML_MODEL*` +* `CM_MODEL_ZOO_STUB` +#### New environment keys auto-detected from customize + +* `CM_GET_DEPENDENT_CACHED_PATH` +* `CM_MLPERF_CUSTOM_MODEL_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-resnet50.md b/docs/AI-ML-models/get-ml-model-resnet50.md new file mode 100644 index 000000000..ff2c976a8 --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-resnet50.md @@ -0,0 +1,356 @@ +Automatically generated README for this automation recipe: **get-ml-model-resnet50** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-resnet50,56203e4e998b4bc0) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-resnet50)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,raw,ml-model,resnet50,ml-model-resnet50,image-classification* +* Output cached? 
*True*
+* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts
+
+
+---
+### Reuse this script in your project
+
+#### Install MLCommons CM automation meta-framework
+
+* [Install CM](https://access.cknowledge.org/playground/?action=install)
+* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md)
+
+#### Pull CM repository with this automation recipe (CM script)
+
+```cm pull repo mlcommons@cm4mlops```
+
+#### Print CM help from the command line
+
+````cmr "get raw ml-model resnet50 ml-model-resnet50 image-classification" --help````
+
+#### Customize and run this script from the command line with different variations and flags
+
+`cm run script --tags=get,raw,ml-model,resnet50,ml-model-resnet50,image-classification`
+
+`cm run script --tags=get,raw,ml-model,resnet50,ml-model-resnet50,image-classification[,variations] `
+
+*or*
+
+`cmr "get raw ml-model resnet50 ml-model-resnet50 image-classification"`
+
+`cmr "get raw ml-model resnet50 ml-model-resnet50 image-classification [variations]" `
+
+
+* *See the list of `variations` [here](#variations) and check the [Getting Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.*
+
+#### Run this script from Python
+
+
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,raw,ml-model,resnet50,ml-model-resnet50,image-classification',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,raw,ml-model,resnet50,ml-model-resnet50,image-classification"``` + +#### Run this script via Docker (beta) + +`cm docker script "get raw ml-model resnet50 ml-model-resnet50 image-classification[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_batch_size.#` + - Environment variables: + - *CM_ML_MODEL_BATCH_SIZE*: `#` + - Workflow: + * `_batch_size.1` + - Environment variables: + - *CM_ML_MODEL_BATCH_SIZE*: `1` + - Workflow: + * `_fix-input-shape` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * `_from-tf` + - Workflow: + * `_huggingface_default` + - Environment variables: + - *CM_PACKAGE_URL*: `https://huggingface.co/ctuning/mlperf-inference-resnet50-onnx-fp32-imagenet2012-v1.0/resolve/main/resnet50_v1.onnx` + - Workflow: + * `_ncnn,fp32` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/8073420/files/resnet50_v1.bin?download=1` + - Workflow: + 1. ***Read "post_deps" on other CM scripts*** + * download-and-extract,_url.https://zenodo.org/record/8073420/files/resnet50_v1.param?download= + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + * `_onnx,from-tf` + - Environment variables: + - *CM_ML_MODEL_DATA_LAYOUT*: `NHWC` + - *CM_ML_MODEL_FRAMEWORK*: `onnx` + - *CM_ML_MODEL_INPUT_LAYERS*: `input_tensor` + - *CM_ML_MODEL_INPUT_LAYER_NAME*: `input_tensor` + - *CM_ML_MODEL_INPUT_SHAPES*: `\"input_tensor\": (BATCH_SIZE, 224, 224, 3)` + - *CM_ML_MODEL_OUTPUT_LAYERS*: `softmax_tensor` + - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `softmax_tensor` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/2535873/files/resnet50_v1.pb` + - Workflow: + * `_onnx,from-tf,fp32` + - Environment variables: + - *CM_DOWNLOAD_FILENAME*: `resnet50_v1_modified.onnx` + - *CM_PACKAGE_URL*: `https://drive.google.com/uc?id=15wZ_8Vt12cb10IEBsln8wksD1zGwlbOM` + - Workflow: + * `_onnx,opset-11` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/4735647/files/resnet50_v1.onnx` + - Workflow: + * 
`_onnx,opset-8` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/2592612/files/resnet50_v1.onnx` + - Workflow: + * `_pytorch,fp32` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/4588417/files/resnet50-19c8e357.pth` + - Workflow: + * `_pytorch,int8` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/4589637/files/resnet50_INT8bit_quantized.pt` + - Workflow: + * `_tensorflow,fix-input-shape` + - Environment variables: + - *CM_ML_MODEL_TF_FIX_INPUT_SHAPE*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_package.tensorflow + * CM names: `--adr.['tensorflow']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_tflite,argmax` + - Environment variables: + - *CM_DAE_EXTRACT_DOWNLOADED*: `yes` + - *CM_DOWNLOAD_FINAL_ENV_NAME*: `` + - *CM_EXTRACT_FINAL_ENV_NAME*: `CM_ML_MODEL_FILE_WITH_PATH` + - *CM_ML_MODEL_FILE*: `resnet50_v1.tflite` + - *CM_ML_MODEL_INPUT_SHAPES*: `\"input_tensor 2\": (BATCH_SIZE, 224, 224, 3)` + - *CM_PACKAGE_URL*: `https://www.dropbox.com/s/cvv2zlfo80h54uz/resnet50_v1.tflite.gz?dl=1` + - Workflow: + * `_tflite,int8,no-argmax` + - Environment variables: + - *CM_DOWNLOAD_FINAL_ENV_NAME*: `CM_ML_MODEL_FILE_WITH_PATH` + - *CM_ML_MODEL_FILE*: `resnet50_quant_full_mlperf_edgetpu.tflite` + - *CM_ML_MODEL_INPUT_SHAPES*: `\"input_tensor 2\": (BATCH_SIZE, 224, 224, 3)` + - *CM_PACKAGE_URL*: `https://zenodo.org/record/8234946/files/resnet50_quant_full_mlperf_edgetpu.tflite?download=1` + - Workflow: + * `_tflite,no-argmax` + - Environment variables: + - *CM_ML_MODEL_FILE*: `resnet50_v1.no-argmax.tflite` + - *CM_ML_MODEL_INPUT_SHAPES*: `\"input_tensor 2\": (BATCH_SIZE, 224, 224, 3)` + - *CM_PACKAGE_URL*: `https://www.dropbox.com/s/vhuqo0wc39lky0a/resnet50_v1.no-argmax.tflite?dl=1` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * `_ncnn` + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `ncnn` + - Workflow: + * **`_onnx`** (default) + - Aliases: `_onnxruntime` + - Environment variables: + - *CM_ML_MODEL_DATA_LAYOUT*: `NCHW` + - *CM_ML_MODEL_FRAMEWORK*: `onnx` + - *CM_ML_MODEL_INPUT_LAYERS*: `input_tensor:0` + - *CM_ML_MODEL_INPUT_LAYER_NAME*: `input_tensor:0` + - *CM_ML_MODEL_INPUT_SHAPES*: `\"input_tensor:0\": (BATCH_SIZE, 3, 224, 224)` + - *CM_ML_MODEL_OUTPUT_LAYERS*: `softmax_tensor:0` + - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `softmax_tensor:0` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `<<>>` + - *CM_ML_MODEL_VER*: `1.5` + - Workflow: + * `_pytorch` + - Environment variables: + - *CM_ML_MODEL_DATA_LAYOUT*: `NCHW` + - *CM_ML_MODEL_FRAMEWORK*: `pytorch` + - *CM_ML_MODEL_GIVEN_CHANNEL_MEANS*: `?` + - *CM_ML_MODEL_INPUT_LAYER_NAME*: `input_tensor:0` + - *CM_ML_MODEL_INPUT_SHAPES*: `\"input_tensor:0\": [BATCH_SIZE, 3, 224, 224]` + - *CM_ML_MODEL_OUTPUT_LAYERS*: `output` + - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `?` + - *CM_ML_STARTING_WEIGHTS_FILENAME*: `<<>>` + - Workflow: + * `_tensorflow` + - Aliases: `_tf` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `76.456` + - *CM_ML_MODEL_DATA_LAYOUT*: `NHWC` + - *CM_ML_MODEL_FRAMEWORK*: `tensorflow` + - *CM_ML_MODEL_GIVEN_CHANNEL_MEANS*: `123.68 116.78 103.94` + - *CM_ML_MODEL_INPUT_LAYERS*: `input_tensor` + - *CM_ML_MODEL_INPUT_LAYER_NAME*: `input_tensor` + - *CM_ML_MODEL_INPUT_SHAPES*: `\"input_tensor:0\": (BATCH_SIZE, 3, 224, 224)` + - *CM_ML_MODEL_NORMALIZE_DATA*: `0` + - *CM_ML_MODEL_OUTPUT_LAYERS*: `softmax_tensor` + - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `softmax_tensor` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `<<>>` + - *CM_ML_MODEL_SUBTRACT_MEANS*: `YES` + - *CM_PACKAGE_URL*: `https://zenodo.org/record/2535873/files/resnet50_v1.pb` + - Workflow: + * `_tflite` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `76.456` + - *CM_ML_MODEL_DATA_LAYOUT*: `NHWC` + - *CM_ML_MODEL_FRAMEWORK*: 
`tflite` + - *CM_ML_MODEL_GIVEN_CHANNEL_MEANS*: `123.68 116.78 103.94` + - *CM_ML_MODEL_INPUT_LAYERS*: `input_tensor` + - *CM_ML_MODEL_INPUT_LAYER_NAME*: `input_tensor` + - *CM_ML_MODEL_INPUT_SHAPES*: `\"input_tensor 2\": (BATCH_SIZE, 224, 224, 3)` + - *CM_ML_MODEL_NORMALIZE_DATA*: `0` + - *CM_ML_MODEL_OUTPUT_LAYERS*: `softmax_tensor` + - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `softmax_tensor` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `<<>>` + - *CM_ML_MODEL_SUBTRACT_MEANS*: `YES` + - Workflow: + +
+ + + * Group "**model-output**" +
+ Click here to expand this section. + + * **`_argmax`** (default) + - Environment variables: + - *CM_ML_MODEL_OUTPUT_LAYER_ARGMAX*: `yes` + - Workflow: + * `_no-argmax` + - Environment variables: + - *CM_ML_MODEL_OUTPUT_LAYER_ARGMAX*: `no` + - Workflow: + +
+ + + * Group "**opset-version**" +
+ Click here to expand this section. + + * `_opset-11` + - Environment variables: + - *CM_ML_MODEL_ONNX_OPSET*: `11` + - Workflow: + * `_opset-8` + - Environment variables: + - *CM_ML_MODEL_ONNX_OPSET*: `8` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` + - *CM_ML_MODEL_PRECISION*: `fp32` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` + - Workflow: + * `_int8` + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `int8` + - *CM_ML_MODEL_PRECISION*: `int8` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `int8` + - Workflow: + * `_uint8` + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `uint8` + - *CM_ML_MODEL_PRECISION*: `uint8` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `uint8` + - Workflow: + +
+ + +#### Default variations + +`_argmax,_fp32,_onnx` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-resnet50/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-resnet50/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-resnet50/_cm.json)*** + * download-and-extract + * CM names: `--adr.['model-downloader']...` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + 1. ***Run native script if exists*** + * [run-fix-input.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-resnet50/run-fix-input.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-resnet50/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-resnet50/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-resnet50/_cm.json) + +___ +### Script output +`cmr "get raw ml-model resnet50 ml-model-resnet50 image-classification [,variations]" -j` +#### New environment keys (filter) + +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_FILE` +* `CM_ML_MODEL_FILE_WITH_PATH` +* `CM_ML_MODEL_STARTING_FILE_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-retinanet-nvidia.md b/docs/AI-ML-models/get-ml-model-retinanet-nvidia.md new file mode 100644 index 000000000..5fba8e668 --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-retinanet-nvidia.md @@ -0,0 +1,172 @@ +Automatically generated README for this automation recipe: **get-ml-model-retinanet-nvidia** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-retinanet-nvidia,f059d249fac843ba) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet-nvidia)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,nvidia-retinanet,nvidia* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ml-model nvidia-retinanet nvidia" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ml-model,nvidia-retinanet,nvidia` + +`cm run script --tags=get,ml-model,nvidia-retinanet,nvidia[,variations] ` + +*or* + +`cmr "get ml-model nvidia-retinanet nvidia"` + +`cmr "get ml-model nvidia-retinanet nvidia [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,ml-model,nvidia-retinanet,nvidia', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model,nvidia-retinanet,nvidia"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model nvidia-retinanet nvidia[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_efficient-nms` + - Environment variables: + - *CM_NVIDIA_EFFICIENT_NMS*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_polygraphy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_TORCH_DEVICE: `cpu` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet-nvidia/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlperf,training,src,_nvidia-retinanet + - CM script: [get-mlperf-training-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-src) + * get,mlperf,inference,src + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,ml-model,retinanet,_pytorch,_fp32,_weights + - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) + * get,generic-python-lib,_torch + * Enable this dependency only if all ENV vars are set:
+`{'CM_TORCH_DEVICE': 'cpu'}` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchvision + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_mlperf_logging + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_TORCH_DEVICE': 'cuda'}` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,generic-python-lib,_torch_cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_TORCH_DEVICE': 'cuda'}` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,nvidia,mlperf,inference,common-code,-_custom + - CM script: [get-mlperf-inference-nvidia-common-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-common-code) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet-nvidia/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet-nvidia/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet-nvidia/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet-nvidia/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet-nvidia/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet-nvidia/_cm.json) + +___ +### Script output +`cmr "get ml-model nvidia-retinanet nvidia [,variations]" -j` +#### New environment keys (filter) + +* `CM_ML_MODEL_*` +* `CM_NVIDIA_RETINANET_*` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_ANCHOR_PATH` +* `CM_ML_MODEL_DYN_BATCHSIZE_PATH` +* `CM_NVIDIA_RETINANET_EFFICIENT_NMS_CONCAT_MODEL_WITH_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-retinanet.md b/docs/AI-ML-models/get-ml-model-retinanet.md new file mode 100644 index 000000000..4df53983b --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-retinanet.md @@ -0,0 +1,225 @@ +Automatically generated README for this automation recipe: **get-ml-model-retinanet** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-retinanet,427bc5665e4541c2) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,raw,resnext50,retinanet,object-detection* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ml-model raw resnext50 retinanet object-detection" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ml-model,raw,resnext50,retinanet,object-detection` + +`cm run script --tags=get,ml-model,raw,resnext50,retinanet,object-detection[,variations] ` + +*or* + +`cmr "get ml-model raw resnext50 retinanet object-detection"` + +`cmr "get ml-model raw resnext50 retinanet object-detection [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,ml-model,raw,resnext50,retinanet,object-detection', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model,raw,resnext50,retinanet,object-detection"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model raw resnext50 retinanet object-detection[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_no-nms` + - Environment variables: + - *CM_TMP_ML_MODEL_RETINANET_NO_NMS*: `yes` + - *CM_ML_MODEL_RETINANET_NO_NMS*: `yes` + - *CM_QAIC_PRINT_NODE_PRECISION_INFO*: `yes` + - Workflow: + * `_onnx,fp32` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/6617879/files/resnext50_32x4d_fpn.onnx` + - *CM_DOWNLOAD_CHECKSUM*: `4544f4e56e0a4684215831cc937ea45c` + - *CM_ML_MODEL_ACCURACY*: `0.3757` + - Workflow: + * `_onnx,no-nms` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,python3 + * CM names: `--adr.['python, python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_package.onnx + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.onnxsim + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * download,file,_url.https://raw.githubusercontent.com/arjunsuresh/ck-qaic/main/package/model-onnx-mlperf-retinanet-no-nms/remove-nms-and-extract-priors.patch + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + * get,git,repo,_repo.https://github.com/mlcommons/training.git,_patch + * CM names: `--adr.['mlperf-training-src']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + * get,ml-model,retinanet,_pytorch,_fp32,_weights + * CM names: `--adr.['pytorch-weights']...` + - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) + * get,generic-python-lib,_package.torch + * CM names: `--adr.['torch', 'pytorch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_pytorch,fp32` + - Environment variables: + 
- *CM_PACKAGE_URL*: `https://zenodo.org/record/6617981/files/resnext50_32x4d_fpn.pth` + - *CM_ML_MODEL_ACCURACY*: `0.3755` + - Workflow: + * `_pytorch,fp32,weights` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/6605272/files/retinanet_model_10.zip?download=1` + - *CM_UNZIP*: `yes` + - *CM_ML_MODEL_FILE*: `retinanet_model_10.pth` + - *CM_ML_MODEL_ACCURACY*: `0.3755` + - Workflow: + * `_weights` + - Environment variables: + - *CM_MODEL_WEIGHTS_FILE*: `yes` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * **`_onnx`** (default) + - Environment variables: + - *CM_ML_MODEL_DATA_LAYOUT*: `NCHW` + - *CM_ML_MODEL_FRAMEWORK*: `onnx` + - Workflow: + * `_pytorch` + - Environment variables: + - *CM_ML_MODEL_DATA_LAYOUT*: `NCHW` + - *CM_ML_MODEL_FRAMEWORK*: `pytorch` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` + - *CM_ML_MODEL_PRECISION*: `fp32` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` + - Workflow: + +
+ + +#### Default variations + +`_fp32,_onnx` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet/_cm.json)*** + * download-and-extract + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_TMP_ML_MODEL_RETINANET_NO_NMS': ['yes']}` + * CM names: `--adr.['dae']...` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + 1. ***Run native script if exists*** + * [run-no-nms.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet/run-no-nms.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet/_cm.json) + +___ +### Script output +`cmr "get ml-model raw resnext50 retinanet object-detection [,variations]" -j` +#### New environment keys (filter) + +* `<<>>` +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_FILE` +* `CM_ML_MODEL_FILE_WITH_PATH` +* `CM_ML_MODEL_RETINANET_QAIC_NODE_PRECISION_INFO_FILE_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-rnnt.md b/docs/AI-ML-models/get-ml-model-rnnt.md new file mode 100644 index 000000000..1d81ace52 --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-rnnt.md @@ -0,0 +1,192 @@ +Automatically generated README for this automation recipe: **get-ml-model-rnnt** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-rnnt,8858f18b89774d28) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-rnnt)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,rnnt,raw,librispeech,speech-recognition* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ml-model rnnt raw librispeech speech-recognition" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ml-model,rnnt,raw,librispeech,speech-recognition` + +`cm run script --tags=get,ml-model,rnnt,raw,librispeech,speech-recognition[,variations] ` + +*or* + +`cmr "get ml-model rnnt raw librispeech speech-recognition"` + +`cmr "get ml-model rnnt raw librispeech speech-recognition [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,ml-model,rnnt,raw,librispeech,speech-recognition', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model,rnnt,raw,librispeech,speech-recognition"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model rnnt raw librispeech speech-recognition[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_pytorch,fp32` + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `0.07452253714852645` + - *CM_PACKAGE_URL*: `https://zenodo.org/record/3662521/files/DistributedDataParallel_1576581068.9962234-epoch-100.pt?download=1` + - Workflow: + * `_pytorch,fp32,amazon-s3` + - Environment variables: + - *CM_PACKAGE_URL*: `https://mlperf-public.s3.us-west-2.amazonaws.com/DistributedDataParallel_1576581068.9962234-epoch-100.pt` + - Workflow: + * `_pytorch,fp32,zenodo` + - Environment variables: + - *CM_PACKAGE_URL*: `https://zenodo.org/record/3662521/files/DistributedDataParallel_1576581068.9962234-epoch-100.pt?download=1` + - Workflow: + * `_weights` + - Environment variables: + - *CM_MODEL_WEIGHTS_FILE*: `yes` + - Workflow: + +
+ + + * Group "**download-src**" +
+ Click here to expand this section. + + * **`_amazon-s3`** (default) + - Workflow: + * `_zenodo` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * **`_pytorch`** (default) + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `pytorch` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` + - *CM_ML_MODEL_PRECISION*: `fp32` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` + - Workflow: + +
+ + +#### Default variations + +`_amazon-s3,_fp32,_pytorch` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-rnnt/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-rnnt/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-rnnt/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-rnnt/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-rnnt/_cm.json) + +___ +### Script output +`cmr "get ml-model rnnt raw librispeech speech-recognition [,variations]" -j` +#### New environment keys (filter) + +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_FILE` +* `CM_ML_MODEL_FILE_WITH_PATH` +* `CM_ML_MODEL_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-stable-diffusion.md b/docs/AI-ML-models/get-ml-model-stable-diffusion.md new file mode 100644 index 000000000..bf1378d78 --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-stable-diffusion.md @@ -0,0 +1,256 @@ +Automatically generated README for this automation recipe: **get-ml-model-stable-diffusion** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-stable-diffusion,22c6516b2d4d4c23) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub 
directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-stable-diffusion)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,raw,ml-model,stable-diffusion,sdxl,text-to-image* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get raw ml-model stable-diffusion sdxl text-to-image" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,raw,ml-model,stable-diffusion,sdxl,text-to-image` + +`cm run script --tags=get,raw,ml-model,stable-diffusion,sdxl,text-to-image[,variations] [--input_flags]` + +*or* + +`cmr "get raw ml-model stable-diffusion sdxl text-to-image"` + +`cmr "get raw ml-model stable-diffusion sdxl text-to-image [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,raw,ml-model,stable-diffusion,sdxl,text-to-image', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,raw,ml-model,stable-diffusion,sdxl,text-to-image"``` + +#### Run this script via Docker (beta) + +`cm docker script "get raw ml-model stable-diffusion sdxl text-to-image[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_batch_size.#` + - Environment variables: + - *CM_ML_MODEL_BATCH_SIZE*: `#` + - Workflow: + * `_pytorch,fp16` + - Workflow: + * `_pytorch,fp32` + - Environment variables: + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0` + - Workflow: + * `_rclone,fp16` + - Environment variables: + - *CM_DOWNLOAD_URL*: `mlc-inference:mlcommons-inference-wg-public/stable_diffusion_fp16` + - Workflow: + * `_rclone,fp32` + - Environment variables: + - *CM_DOWNLOAD_URL*: `mlc-inference:mlcommons-inference-wg-public/stable_diffusion_fp32` + - Workflow: + +
+ + + * Group "**download-source**" +
+ Click here to expand this section. + + * `_huggingface` + - Workflow: + * **`_mlcommons`** (default) + - Workflow: + +
+ + + * Group "**download-tool**" +
+ Click here to expand this section. + + * `_git` + - Environment variables: + - *CM_DOWNLOAD_TOOL*: `git` + - Workflow: + * `_rclone` + - Environment variables: + - *CM_RCLONE_CONFIG_CMD*: `rclone config create mlc-inference s3 provider=Cloudflare access_key_id=f65ba5eef400db161ea49967de89f47b secret_access_key=fbea333914c292b854f14d3fe232bad6c5407bf0ab1bebf78833c2b359bdfd2b endpoint=https://c2686074cb2caf5cbaf6d134bdba8b47.r2.cloudflarestorage.com` + - *CM_DOWNLOAD_TOOL*: `rclone` + - Workflow: + * `_wget` + - Environment variables: + - *CM_DOWNLOAD_TOOL*: `wget` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * **`_pytorch`** (default) + - Environment variables: + - *CM_ML_MODEL_FRAMEWORK*: `pytorch` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * `_fp16` + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp16` + - *CM_ML_MODEL_PRECISION*: `fp16` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp16` + - Workflow: + * **`_fp32`** (default) + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` + - *CM_ML_MODEL_PRECISION*: `fp32` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` + - Workflow: + * `_int8` + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `int8` + - *CM_ML_MODEL_PRECISION*: `int8` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `int8` + - Workflow: + * `_uint8` + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `uint8` + - *CM_ML_MODEL_PRECISION*: `uint8` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `uint8` + - Workflow: + +
+ + +#### Default variations + +`_fp32,_mlcommons,_pytorch` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--checkpoint=value` → `SDXL_CHECKPOINT_PATH=value` +* `--download_path=value` → `CM_DOWNLOAD_PATH=value` +* `--to=value` → `CM_DOWNLOAD_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "checkpoint":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-stable-diffusion/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-stable-diffusion/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-stable-diffusion/_cm.json)*** + * get,ml-model,huggingface,zoo,_clone-repo,_model-stub.stabilityai/stable-diffusion-xl-base-1.0 + * Enable this dependency only if all ENV vars are set:
+`{'CM_TMP_REQUIRE_DOWNLOAD': ['yes'], 'CM_DOWNLOAD_TOOL': ['git']}` + * CM names: `--adr.['hf-zoo']...` + - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo) + * download-and-extract + * Enable this dependency only if all ENV vars are set:
+`{'CM_TMP_REQUIRE_DOWNLOAD': ['yes'], 'CM_DOWNLOAD_TOOL': ['rclone']}` + * CM names: `--adr.['dae']...` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-stable-diffusion/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-stable-diffusion/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-stable-diffusion/_cm.json) + +___ +### Script output +`cmr "get raw ml-model stable-diffusion sdxl text-to-image [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_ML_MODEL_*` +* `SDXL_CHECKPOINT_PATH` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-models/get-ml-model-tiny-resnet.md b/docs/AI-ML-models/get-ml-model-tiny-resnet.md new file mode 100644 index 000000000..b58796435 --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-tiny-resnet.md @@ -0,0 +1,213 @@ +Automatically generated README for this automation recipe: **get-ml-model-tiny-resnet** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-tiny-resnet,dd5ec11c3f6e49eb) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-tiny-resnet)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find 
and reuse this script (see in above meta description): *get,raw,ml-model,resnet,pretrained,tiny,model,ic,ml-model-tiny-resnet,image-classification* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get raw ml-model resnet pretrained tiny model ic ml-model-tiny-resnet image-classification" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,raw,ml-model,resnet,pretrained,tiny,model,ic,ml-model-tiny-resnet,image-classification` + +`cm run script --tags=get,raw,ml-model,resnet,pretrained,tiny,model,ic,ml-model-tiny-resnet,image-classification[,variations] ` + +*or* + +`cmr "get raw ml-model resnet pretrained tiny model ic ml-model-tiny-resnet image-classification"` + +`cmr "get raw ml-model resnet pretrained tiny model ic ml-model-tiny-resnet image-classification [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,raw,ml-model,resnet,pretrained,tiny,model,ic,ml-model-tiny-resnet,image-classification', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,raw,ml-model,resnet,pretrained,tiny,model,ic,ml-model-tiny-resnet,image-classification"``` + +#### Run this script via Docker (beta) + +`cm docker script "get raw ml-model resnet pretrained tiny model ic ml-model-tiny-resnet image-classification[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_batch_size.#` + - Environment variables: + - *CM_ML_MODEL_BATCH_SIZE*: `#` + - Workflow: + * `_tflite,int8` + - Environment variables: + - *CM_PACKAGE_URL*: `https://github.com/mlcommons/tiny/raw/master/benchmark/training/image_classification/trained_models/pretrainedResnet_quant.tflite` + - *CM_DOWNLOAD_CHECKSUM*: `2d6dd48722471313e4c4528249205ae3` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * `_onnx` + - Environment variables: + - *CM_TMP_ML_MODEL_TF2ONNX*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,python3 + * CM names: `--adr.['python,python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,tiny,model,resnet,_tflite + * CM names: `--adr.['tflite-resnet-model', 'dependent-model']...` + - CM script: [get-ml-model-tiny-resnet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-tiny-resnet) + * get,generic-python-lib,_package.tf2onnx + * CM names: `--adr.['tf2onnx']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * **`_tflite`** (default) + - Environment variables: + - *CM_ML_MODEL_ACCURACY*: `85` + - *CM_ML_MODEL_DATA_LAYOUT*: `NHWC` + - *CM_ML_MODEL_FRAMEWORK*: `tflite` + - *CM_ML_MODEL_GIVEN_CHANNEL_MEANS*: `` + - *CM_ML_MODEL_INPUT_LAYERS*: `` + - *CM_ML_MODEL_INPUT_LAYER_NAME*: `` + - *CM_ML_MODEL_INPUT_SHAPES*: `` + - *CM_ML_MODEL_NORMALIZE_DATA*: `0` + - *CM_ML_MODEL_OUTPUT_LAYERS*: `` + - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `<<>>` + - *CM_ML_MODEL_SUBTRACT_MEANS*: `YES` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * `_fp32` + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` + - *CM_ML_MODEL_PRECISION*: `fp32` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` + - Workflow: + * **`_int8`** (default) + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `int8` + - *CM_ML_MODEL_PRECISION*: `int8` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `int8` + - Workflow: + * `_uint8` + - Environment variables: + - *CM_ML_MODEL_INPUT_DATA_TYPES*: `uint8` + - *CM_ML_MODEL_PRECISION*: `uint8` + - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `uint8` + - Workflow: + +
+ + +#### Default variations + +`_int8,_tflite` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-tiny-resnet/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-tiny-resnet/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-tiny-resnet/_cm.json)*** + * download-and-extract + * Enable this dependency only if all ENV vars are set:
+`{'CM_PACKAGE_URL': ['on']}` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-tiny-resnet/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-tiny-resnet/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-tiny-resnet/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-tiny-resnet/_cm.json) + +___ +### Script output +`cmr "get raw ml-model resnet pretrained tiny model ic ml-model-tiny-resnet image-classification [,variations]" -j` +#### New environment keys (filter) + +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_FILE` +* `CM_ML_MODEL_FILE_WITH_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-using-imagenet-from-model-zoo.md b/docs/AI-ML-models/get-ml-model-using-imagenet-from-model-zoo.md new file mode 100644 index 000000000..9d979bdb8 --- /dev/null +++ b/docs/AI-ML-models/get-ml-model-using-imagenet-from-model-zoo.md @@ -0,0 +1,147 @@ +Automatically generated README for this automation recipe: **get-ml-model-using-imagenet-from-model-zoo** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-using-imagenet-from-model-zoo,153e08828c4e45cc) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this 
script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-using-imagenet-from-model-zoo)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,model-zoo,zoo,imagenet,image-classification* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ml-model model-zoo zoo imagenet image-classification" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ml-model,model-zoo,zoo,imagenet,image-classification` + +`cm run script --tags=get,ml-model,model-zoo,zoo,imagenet,image-classification[,variations] ` + +*or* + +`cmr "get ml-model model-zoo zoo imagenet image-classification"` + +`cmr "get ml-model model-zoo zoo imagenet image-classification [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,ml-model,model-zoo,zoo,imagenet,image-classification', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model,model-zoo,zoo,imagenet,image-classification"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model model-zoo zoo imagenet image-classification[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**model-source**" +
+ Click here to expand this section. + + * `_model.#` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,ml-model,zoo,deepsparse,_model-stub.# + * CM names: `--adr.['neural-magic-zoo-downloader']...` + - CM script: [get-ml-model-neuralmagic-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-neuralmagic-zoo) + * `_model.resnet101-pytorch-base` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,ml-model,zoo,deepsparse,_model-stub.zoo:cv/classification/resnet_v1-101/pytorch/sparseml/imagenet/base-none + * CM names: `--adr.['neural-magic-zoo-downloader']...` + - CM script: [get-ml-model-neuralmagic-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-neuralmagic-zoo) + * `_model.resnet50-pruned95-uniform-quant` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,ml-model,zoo,deepsparse,_model-stub.zoo:cv/classification/resnet_v1-50/pytorch/sparseml/imagenet/pruned95_uniform_quant-none + * CM names: `--adr.['neural-magic-zoo-downloader']...` + - CM script: [get-ml-model-neuralmagic-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-neuralmagic-zoo) + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-using-imagenet-from-model-zoo/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-using-imagenet-from-model-zoo/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-using-imagenet-from-model-zoo/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-using-imagenet-from-model-zoo/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-using-imagenet-from-model-zoo/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-using-imagenet-from-model-zoo/_cm.json) + +___ +### Script output +`cmr "get ml-model model-zoo zoo imagenet image-classification [,variations]" -j` +#### New environment keys (filter) + +* `CM_ML_MODEL*` +#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-models/get-tvm-model.md b/docs/AI-ML-models/get-tvm-model.md new file mode 100644 index 000000000..61775fb9f --- /dev/null +++ b/docs/AI-ML-models/get-tvm-model.md @@ -0,0 +1,288 @@ +Automatically generated README for this automation recipe: **get-tvm-model** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-tvm-model,c1b7b656b6224307) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + 
+--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm-model)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ml-model-tvm,tvm-model* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ml-model-tvm tvm-model" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ml-model-tvm,tvm-model` + +`cm run script --tags=get,ml-model-tvm,tvm-model[,variations] ` + +*or* + +`cmr "get ml-model-tvm tvm-model"` + +`cmr "get ml-model-tvm tvm-model [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,ml-model-tvm,tvm-model', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model-tvm,tvm-model"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model-tvm tvm-model[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_tune-model` + - Environment variables: + - *CM_TUNE_TVM_MODEL*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_xgboost + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pandas + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_tornado + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + + * Group "**batchsize**" +
+ Click here to expand this section. + + * `_batch_size.#` + - Environment variables: + - *CM_ML_MODEL_MAX_BATCH_SIZE*: `#` + - Workflow: + +
+ + + * Group "**frontend**" +
+ Click here to expand this section. + + * **`_onnx`** (default) + - Environment variables: + - *CM_TVM_FRONTEND_FRAMEWORK*: `onnx` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_onnx + * CM names: `--adr.['onnx']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_pytorch` + - Aliases: `_torch` + - Environment variables: + - *CM_TVM_FRONTEND_FRAMEWORK*: `pytorch` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_torch + * CM names: `--adr.['pytorch', 'torch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchvision + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_tensorflow` + - Aliases: `_tf` + - Environment variables: + - *CM_TVM_FRONTEND_FRAMEWORK*: `tensorflow` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_tensorflow + * CM names: `--adr.['tensorflow']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_tflite` + - Environment variables: + - *CM_TVM_FRONTEND_FRAMEWORK*: `tflite` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_tflite + * CM names: `--adr.['tflite']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + + * Group "**model**" +
+ Click here to expand this section. + + * `_model.#` + - Environment variables: + - *CM_ML_MODEL*: `#` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Workflow: + * `_int8` + - Workflow: + * `_uint8` + - Workflow: + +
+ + + * Group "**runtime**" +
+ Click here to expand this section. + + * `_graph_executor` + - Environment variables: + - *CM_TVM_USE_VM*: `no` + - Workflow: + * **`_virtual_machine`** (default) + - Environment variables: + - *CM_TVM_USE_VM*: `yes` + - Workflow: + +
+ + +#### Default variations + +`_fp32,_onnx,_virtual_machine` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_ML_MODEL_MAX_BATCH_SIZE: `1` +* CM_TUNE_TVM_MODEL: `no` +* CM_TVM_USE_VM: `yes` +* CM_TVM_FRONTEND_FRAMEWORK: `onnx` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm-model/_cm.json)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,tvm + * CM names: `--adr.['tvm']...` + - CM script: [get-tvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm) + * get,generic-python-lib,_decorator + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_psutil + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_scipy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_attrs + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm-model/customize.py)*** + 1. 
***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm-model/_cm.json)*** + * get,ml-model,raw + * CM names: `--adr.['original-model']...` + - CM script: [get-ml-model-3d-unet-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-3d-unet-kits19) + - CM script: [get-ml-model-bert-base-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-base-squad) + - CM script: [get-ml-model-bert-large-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-large-squad) + - CM script: [get-ml-model-dlrm-terabyte](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-dlrm-terabyte) + - CM script: [get-ml-model-efficientnet-lite](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-efficientnet-lite) + - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) + - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo) + - CM script: [get-ml-model-llama2](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-llama2) + - CM script: [get-ml-model-mobilenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-mobilenet) + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) + - CM script: [get-ml-model-rnnt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-rnnt) + - CM script: [get-ml-model-stable-diffusion](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-stable-diffusion) + - CM script: [get-ml-model-tiny-resnet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-tiny-resnet) + 1. 
***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm-model/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm-model/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm-model/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm-model/_cm.json) + +___ +### Script output +`cmr "get ml-model-tvm tvm-model [,variations]" -j` +#### New environment keys (filter) + +* `CM_ML_MODEL_*` +* `CM_TUNE_TVM_*` +* `CM_TVM_*` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_FILE` +* `CM_ML_MODEL_FILE_WITH_PATH` +* `CM_ML_MODEL_FRAMEWORK` +* `CM_ML_MODEL_INPUT_SHAPES` +* `CM_ML_MODEL_ORIGINAL_FILE_WITH_PATH` +* `CM_ML_MODEL_PATH` +* `CM_TUNE_TVM_MODEL` +* `CM_TVM_FRONTEND_FRAMEWORK` \ No newline at end of file diff --git a/docs/AI-ML-optimization/calibrate-model-for.qaic.md b/docs/AI-ML-optimization/calibrate-model-for.qaic.md new file mode 100644 index 000000000..9441e4566 --- /dev/null +++ b/docs/AI-ML-optimization/calibrate-model-for.qaic.md @@ -0,0 +1,289 @@ +Automatically generated README for this automation recipe: **calibrate-model-for.qaic** + +Category: **AI/ML optimization** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=calibrate-model-for.qaic,817bad70df2f4e45) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/calibrate-model-for.qaic)* +* CM meta description 
for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *qaic,calibrate,profile,qaic-profile,qaic-calibrate* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "qaic calibrate profile qaic-profile qaic-calibrate" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=qaic,calibrate,profile,qaic-profile,qaic-calibrate` + +`cm run script --tags=qaic,calibrate,profile,qaic-profile,qaic-calibrate[,variations] ` + +*or* + +`cmr "qaic calibrate profile qaic-profile qaic-calibrate"` + +`cmr "qaic calibrate profile qaic-profile qaic-calibrate [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'qaic,calibrate,profile,qaic-profile,qaic-calibrate', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="qaic,calibrate,profile,qaic-profile,qaic-calibrate"``` + +#### Run this script via Docker (beta) + +`cm docker script "qaic calibrate profile qaic-profile qaic-calibrate[variations]" ` + +___ +### Customization + + +#### Variations + + * *Internal group (variations should not be selected manually)* +
+ Click here to expand this section. + + * `_bert_` + - Environment variables: + - *CM_QAIC_MODEL_NAME*: `bert-large` + - *CM_CREATE_INPUT_BATCH*: `no` + - Workflow: + +
+ + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_first.#` + - Workflow: + * `_resnet50,tf` + - Environment variables: + - *CM_QAIC_MODEL_TO_CONVERT*: `calibrate_resnet50_tf` + - Workflow: + +
+ + + * Group "**batch-size**" +
+ Click here to expand this section. + + * `_bs.#` + - Environment variables: + - *CM_QAIC_MODEL_BATCH_SIZE*: `#` + - *CM_CREATE_INPUT_BATCH*: `yes` + - Workflow: + * `_bs.1` + - Environment variables: + - *CM_QAIC_MODEL_BATCH_SIZE*: `1` + - *CM_CREATE_INPUT_BATCH*: `yes` + - Workflow: + +
+ + + * Group "**calib-dataset-filter-size**" +
+ Click here to expand this section. + + * `_filter-size.#` + - Workflow: + +
+ + + * Group "**calibration-option**" +
+ Click here to expand this section. + + * `_mlperf.option1` + - Workflow: + * `_mlperf.option2` + - Workflow: + +
+ + + * Group "**model**" +
+ Click here to expand this section. + + * `_bert-99` + - Environment variables: + - *CM_CALIBRATE_SQUAD*: `yes` + - *CM_QAIC_COMPILER_ARGS*: `` + - *CM_QAIC_COMPILER_PARAMS*: `-onnx-define-symbol=batch_size,1 -onnx-define-symbol=seg_length,<<>> -input-list-file=<<>> -num-histogram-bins=512 -profiling-threads=<<>>` + - *CM_QAIC_MODEL_TO_CONVERT*: `calibrate_bert_mlperf` + - Workflow: + * `_resnet50` + - Environment variables: + - *CM_QAIC_MODEL_NAME*: `resnet50` + - *CM_CALIBRATE_IMAGENET*: `yes` + - *CM_QAIC_COMPILER_ARGS*: `` + - *CM_QAIC_COMPILER_PARAMS*: `-output-node-name=ArgMax -profiling-threads=<<>>` + - *CM_QAIC_OUTPUT_NODE_NAME*: `-output-node-name=ArgMax` + - *CM_QAIC_MODEL_TO_CONVERT*: `calibrate_resnet50_tf` + - Workflow: + * `_retinanet` + - Environment variables: + - *CM_QAIC_MODEL_NAME*: `retinanet` + - *CM_CALIBRATE_OPENIMAGES*: `yes` + - *CM_QAIC_COMPILER_ARGS*: `` + - *CM_QAIC_COMPILER_PARAMS*: `-enable-channelwise -profiling-threads=<<>> -onnx-define-symbol=batch_size,<<>> -node-precision-info=<<>>` + - *CM_QAIC_MODEL_TO_CONVERT*: `calibrate_retinanet_no_nms_mlperf` + - Workflow: + +
+ + + * Group "**model-framework**" +
+ Click here to expand this section. + + * `_tf` + - Workflow: + +
+ + + * Group "**seq-length**" +
+ Click here to expand this section. + + * `_seq.#` + - Environment variables: + - *CM_DATASET_SQUAD_TOKENIZED_MAX_SEQ_LENGTH*: `#` + - Workflow: + * `_seq.384` + - Environment variables: + - *CM_DATASET_SQUAD_TOKENIZED_MAX_SEQ_LENGTH*: `#` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/calibrate-model-for.qaic/_cm.json)*** + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,qaic,apps,sdk + * CM names: `--adr.['qaic-apps-sdk']...` + - CM script: [get-qaic-apps-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-qaic-apps-sdk) + * get,preprocessed,dataset,_calibration,openimages,_for.retinanet.onnx,_NCHW,_fp32,_custom-annotations + * Enable this dependency only if all ENV vars are set:
+`{'CM_CALIBRATE_OPENIMAGES': ['yes']}` + * CM names: `--adr.['openimages-cal', 'preprocessed-dataset']...` + - CM script: [get-preprocessed-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openimages) + * get,dataset,imagenet,preprocessed,_calibration,_for.resnet50,_float32,_rgb32 + * Enable this dependency only if all ENV vars are set:
+`{'CM_CALIBRATE_IMAGENET': ['yes']}` + * CM names: `--adr.['imagenet-cal', 'preprocessed-calibration-dataset']...` + - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) + * get,dataset,preprocessed,_calib1,squad,_pickle,_seq-length.384,_packed + * Enable this dependency only if all ENV vars are set:
+`{'CM_CALIBRATE_SQUAD': ['on']}` + * CM names: `--adr.['squad-cal', 'preprocessed-dataset']...` + - CM script: [get-preprocessed-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-squad) + * get,ml-model + * CM names: `--adr.['model-src']...` + - CM script: [get-ml-model-abtf-ssd-pytorch](https://github.com/mlcommons/cm4abtf/tree/master/script/get-ml-model-abtf-ssd-pytorch) + - CM script: [get-ml-model-3d-unet-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-3d-unet-kits19) + - CM script: [get-ml-model-bert-base-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-base-squad) + - CM script: [get-ml-model-bert-large-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-large-squad) + - CM script: [get-ml-model-dlrm-terabyte](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-dlrm-terabyte) + - CM script: [get-ml-model-efficientnet-lite](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-efficientnet-lite) + - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) + - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo) + - CM script: [get-ml-model-llama2](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-llama2) + - CM script: [get-ml-model-mobilenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-mobilenet) + - CM script: [get-ml-model-neuralmagic-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-neuralmagic-zoo) + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) + - CM script: 
[get-ml-model-retinanet-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet-nvidia) + - CM script: [get-ml-model-rnnt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-rnnt) + - CM script: [get-ml-model-stable-diffusion](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-stable-diffusion) + - CM script: [get-ml-model-tiny-resnet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-tiny-resnet) + - CM script: [get-ml-model-using-imagenet-from-model-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-using-imagenet-from-model-zoo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/calibrate-model-for.qaic/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/calibrate-model-for.qaic/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/calibrate-model-for.qaic/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/calibrate-model-for.qaic/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/calibrate-model-for.qaic/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/calibrate-model-for.qaic/_cm.json) + +___ +### Script output +`cmr "qaic calibrate profile qaic-profile qaic-calibrate [,variations]" -j` +#### New environment keys (filter) + +* `CM_QAIC_MODEL_PROFILE_*` +#### New environment keys auto-detected from customize + +* `CM_QAIC_MODEL_PROFILE_WITH_PATH` \ No newline at end of file diff --git a/docs/AI-ML-optimization/compile-model-for.qaic.md b/docs/AI-ML-optimization/compile-model-for.qaic.md new file mode 100644 index 000000000..686f6dec2 --- /dev/null +++ b/docs/AI-ML-optimization/compile-model-for.qaic.md @@ -0,0 +1,438 @@ +Automatically generated README for this automation recipe: **compile-model-for.qaic** + +Category: **AI/ML optimization** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=compile-model-for.qaic,3f0f43b5d0304d1c) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-model-for.qaic)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *qaic,compile,model,model-compile,qaic-compile* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "qaic compile model model-compile qaic-compile" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=qaic,compile,model,model-compile,qaic-compile` + +`cm run script --tags=qaic,compile,model,model-compile,qaic-compile[,variations] [--input_flags]` + +*or* + +`cmr "qaic compile model model-compile qaic-compile"` + +`cmr "qaic compile model model-compile qaic-compile [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'qaic,compile,model,model-compile,qaic-compile', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="qaic,compile,model,model-compile,qaic-compile"``` + +#### Run this script via Docker (beta) + +`cm docker script "qaic compile model model-compile qaic-compile[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_bert-99` + - Environment variables: + - *CM_COMPILE_BERT*: `on` + - *CM_QAIC_MODEL_TO_CONVERT*: `calibrate_bert_mlperf` + - *CM_QAIC_MODEL_COMPILER_PARAMS_BASE*: `-aic-hw -aic-hw-version=2.0 -execute-nodes-in-fp16=Add,Div,Erf,Softmax -quantization-schema=symmetric_with_uint8 -quantization-precision=Int8 -quantization-precision-bias=Int32 -vvv -compile-only -onnx-define-symbol=batch_size,1 -onnx-define-symbol=seg_length,384 -multicast-weights -combine-inputs=false -combine-outputs=false` + - *CM_QAIC_MODEL_COMPILER_ARGS*: `` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * calibrate,qaic,_bert-99 + * CM names: `--adr.['bert-profile', 'qaic-profile']...` + - CM script: [calibrate-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/calibrate-model-for.qaic) + * `_bert-99,offline` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS*: `-allocator-dealloc-delay=2 -size-split-granularity=1536 -vtcm-working-set-limit-ratio=1` + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=1 -mos=1 -ols=2` + - Workflow: + * `_bert-99,offline,nsp.14` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=1 -mos=1 -ols=3` + - Workflow: + * `_bert-99,offline,nsp.16` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=1 -mos=1 -ols=2` + - Workflow: + * `_bert-99,server` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS*: `-allocator-dealloc-delay=2 -size-split-granularity=1536 -vtcm-working-set-limit-ratio=1` + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=1 -mos=1 -ols=3` + - Workflow: + * `_bert-99,server,nsp.14` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=1 -mos=1 -ols=3` + - Workflow: + * `_bert-99,singlestream` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS*: `` + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=8 -mos=8 -ols=1` + - Workflow: + * 
`_bert-99,singlestream,nsp.14` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=8 -mos=8 -ols=1` + - Workflow: + * `_bert-99.9` + - Environment variables: + - *CM_COMPILE_BERT*: `on` + - *CM_QAIC_MODEL_TO_CONVERT*: `bert_mlperf` + - *CM_QAIC_MODEL_COMPILER_PARAMS_BASE*: `-aic-hw -aic-hw-version=2.0 -convert-to-fp16 -vvv -compile-only -onnx-define-symbol=batch_size,1 -onnx-define-symbol=seg_length,384 -combine-inputs=false -combine-outputs=false` + - *CM_QAIC_MODEL_COMPILER_ARGS*: `` + - Workflow: + * `_bert-99.9,offline` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=2 -mos=1 -ols=2` + - Workflow: + * `_bert-99.9,offline,nsp.14` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=2 -mos=1 -ols=2` + - Workflow: + * `_bert-99.9,offline,nsp.16` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=2 -mos=1 -ols=2` + - Workflow: + * `_bert-99.9,server` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=2` + - Workflow: + * `_bert-99.9,server,nsp.14` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=2` + - Workflow: + * `_resnet50` + - Environment variables: + - *CM_COMPILE_RESNET*: `on` + - *CM_QAIC_MODEL_TO_CONVERT*: `compile_resnet50_tf` + - *CM_QAIC_MODEL_COMPILER_PARAMS_BASE*: `-aic-hw -aic-hw-version=2.0 -quantization-schema=symmetric_with_uint8 -quantization-precision=Int8 -output-node-name=ArgMax -vvv -compile-only -use-producer-dma=1` + - Workflow: + * `_resnet50,multistream` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS*: `` + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=4 -mos=1 -ols=1` + - Workflow: + * `_resnet50,multistream,nsp.14` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=4` + - Workflow: + * `_resnet50,offline` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS*: `-sdp-cluster-sizes=2,2 
-multicast-weights` + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=4 -mos=1,2 -ols=4` + - Workflow: + * `_resnet50,offline,nsp.14` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=4 -mos=1,2 -ols=4` + - Workflow: + * `_resnet50,server` + - Workflow: + * `_resnet50,server,nsp.14` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=4 -ols=4` + - *CM_QAIC_MODEL_COMPILER_ARGS*: `-sdp-cluster-sizes=2,2 -mos=1,2 -multicast-weights` + - Workflow: + * `_resnet50,server,nsp.16` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=4 -ols=4` + - *CM_QAIC_MODEL_COMPILER_ARGS*: `-sdp-cluster-sizes=4,4 -mos=1,4` + - Workflow: + * `_resnet50,singlestream` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS*: `-aic-num-of-instances=1` + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=8 -mos=1 -ols=1` + - Workflow: + * `_resnet50,singlestream,nsp.14` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=8 -mos=1 -ols=1` + - Workflow: + * `_resnet50,tf` + - Environment variables: + - *CM_QAIC_MODEL_TO_CONVERT*: `calibrate_resnet50_tf` + - Workflow: + * `_retinanet` + - Environment variables: + - *CM_COMPILE_RETINANET*: `on` + - *CM_QAIC_MODEL_TO_CONVERT*: `calibrate_retinanet_no_nms_mlperf` + - *CM_QAIC_MODEL_COMPILER_ARGS*: `-aic-enable-depth-first` + - *CM_QAIC_MODEL_COMPILER_PARAMS_BASE*: `-aic-hw -aic-hw-version=2.0 -compile-only -enable-channelwise -onnx-define-symbol=batch_size,1 -node-precision-info=<<>> -quantization-schema-constants=symmetric_with_uint8 -quantization-schema-activations=asymmetric -quantization-calibration=None` + - Workflow: + * `_retinanet,multistream` + - Workflow: + * `_retinanet,nsp.14` + - Workflow: + * `_retinanet,offline` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=1 -mos=1 -ols=1` + - Workflow: + * `_retinanet,offline,nsp.14` + - Workflow: + * `_retinanet,server` + - 
Workflow: + * `_retinanet,server,nsp.14` + - Workflow: + * `_retinanet,singlestream` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS*: `` + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=8 -mos=1 -ols=1` + - Workflow: + * `_retinanet,singlestream,nsp.14` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=8 -mos=1 -ols=1` + - Workflow: + +
+ + + * Group "**batch-size**" +
+ Click here to expand this section. + + * `_bs.#` + - Environment variables: + - *CM_QAIC_MODEL_BATCH_SIZE*: `#` + - Workflow: + * `_bs.1` + - Environment variables: + - *CM_QAIC_MODEL_BATCH_SIZE*: `1` + - Workflow: + +
+ + + * Group "**calib-dataset-filter-size**" +
+ Click here to expand this section. + + * `_filter-size.#` + - Workflow: + +
+ + + * Group "**mlperf-scenario**" +
+ Click here to expand this section. + + * `_multistream` + - Workflow: + * `_offline` + - Workflow: + * `_server` + - Workflow: + * **`_singlestream`** (default) + - Workflow: + +
+ + + * Group "**model-framework**" +
+ Click here to expand this section. + + * `_tf` + - Workflow: + +
+ + + * Group "**nsp**" +
+ Click here to expand this section. + + * `_nsp.14` + - Workflow: + * `_nsp.16` + - Workflow: + * `_nsp.8` + - Workflow: + * `_nsp.9` + - Workflow: + +
+ + + * Group "**percentile-calibration**" +
+ Click here to expand this section. + + * `_pc.#` + - Environment variables: + - *CM_QAIC_MODEL_COMPILER_PERCENTILE_CALIBRATION_VALUE*: `#` + - *CM_QAIC_MODEL_COMPILER_QUANTIZATION_PARAMS*: `-quantization-calibration=Percentile -percentile-calibration-value=<<>>` + - Workflow: + +
+ + + * Group "**quantization**" +
+ Click here to expand this section. + + * `_no-quantized` + - Environment variables: + - *CM_QAIC_MODEL_QUANTIZATION*: `no` + - Workflow: + * **`_quantized`** (default) + - Environment variables: + - *CM_QAIC_MODEL_QUANTIZATION*: `yes` + - Workflow: + +
+ + +#### Default variations + +`_quantized,_singlestream` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--register=value` → `CM_REGISTER_CACHE=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "register":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-model-for.qaic/_cm.json)*** + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,qaic,apps,sdk + * Skip this dependency only if all ENV vars are set:
+`{'CM_REGISTER_CACHE': ['on']}` + * CM names: `--adr.['qaic-apps-sdk']...` + - CM script: [get-qaic-apps-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-qaic-apps-sdk) + * qaic,calibrate,_retinanet + * Enable this dependency only if all ENV vars are set:
+`{'CM_COMPILE_RETINANET': ['yes']}` + * CM names: `--adr.['retinanet-profile', 'qaic-profile']...` + - CM script: [calibrate-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/calibrate-model-for.qaic) + * qaic,calibrate,_resnet50 + * Enable this dependency only if all ENV vars are set:
+`{'CM_COMPILE_RESNET': ['on']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_REGISTER_CACHE': ['on']}` + * CM names: `--adr.['resnet-profile', 'qaic-profile']...` + - CM script: [calibrate-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/calibrate-model-for.qaic) + * get,ml-model + * CM names: `--adr.['model-src']...` + - CM script: [get-ml-model-abtf-ssd-pytorch](https://github.com/mlcommons/cm4abtf/tree/master/script/get-ml-model-abtf-ssd-pytorch) + - CM script: [get-ml-model-3d-unet-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-3d-unet-kits19) + - CM script: [get-ml-model-bert-base-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-base-squad) + - CM script: [get-ml-model-bert-large-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-large-squad) + - CM script: [get-ml-model-dlrm-terabyte](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-dlrm-terabyte) + - CM script: [get-ml-model-efficientnet-lite](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-efficientnet-lite) + - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) + - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo) + - CM script: [get-ml-model-llama2](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-llama2) + - CM script: [get-ml-model-mobilenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-mobilenet) + - CM script: [get-ml-model-neuralmagic-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-neuralmagic-zoo) + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) + - CM script: 
[get-ml-model-retinanet-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet-nvidia) + - CM script: [get-ml-model-rnnt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-rnnt) + - CM script: [get-ml-model-stable-diffusion](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-stable-diffusion) + - CM script: [get-ml-model-tiny-resnet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-tiny-resnet) + - CM script: [get-ml-model-using-imagenet-from-model-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-using-imagenet-from-model-zoo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-model-for.qaic/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-model-for.qaic/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-model-for.qaic/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-model-for.qaic/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-model-for.qaic/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-model-for.qaic/_cm.json) + +___ +### Script output +`cmr "qaic compile model model-compile qaic-compile [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_ML_MODEL_FILE_WITH_PATH` +* `CM_QAIC_MODEL*` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_FILE_WITH_PATH` +* `CM_QAIC_MODEL_COMPILED_BINARY_WITH_PATH` +* `CM_QAIC_MODEL_FINAL_COMPILATION_CMD` \ No newline at end of file diff --git a/docs/AI-ML-optimization/prune-bert-models.md b/docs/AI-ML-optimization/prune-bert-models.md new file mode 100644 index 000000000..b491bf9cf --- /dev/null +++ b/docs/AI-ML-optimization/prune-bert-models.md @@ -0,0 +1,185 @@ +Automatically generated README for this automation recipe: **prune-bert-models** + +Category: **AI/ML optimization** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=prune-bert-models,76182d4896414216) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-bert-models)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *prune,bert-models,bert-prune,prune-bert-models* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "prune bert-models bert-prune prune-bert-models" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=prune,bert-models,bert-prune,prune-bert-models` + +`cm run script --tags=prune,bert-models,bert-prune,prune-bert-models[,variations] [--input_flags]` + +*or* + +`cmr "prune bert-models bert-prune prune-bert-models"` + +`cmr "prune bert-models bert-prune prune-bert-models [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'prune,bert-models,bert-prune,prune-bert-models', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="prune,bert-models,bert-prune,prune-bert-models"``` + +#### Run this script via Docker (beta) + +`cm docker script "prune bert-models bert-prune prune-bert-models[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_model.#` + - Environment variables: + - *CM_BERT_PRUNE_MODEL_NAME*: `#` + - *CM_MODEL_ZOO_STUB*: `#` + - Workflow: + * `_path.#` + - Environment variables: + - *CM_BERT_PRUNE_CKPT_PATH*: `#` + - Workflow: + * `_task.#` + - Environment variables: + - *CM_BERT_PRUNE_TASK*: `#` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--constraint=value` → `CM_BERT_PRUNE_CONSTRAINT=value` +* `--output_dir=value` → `CM_BERT_PRUNE_OUTPUT_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "constraint":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_BERT_PRUNE_TASK: `squad` +* CM_BERT_PRUNE_MODEL_NAME: `bert-large-uncased` +* CM_MODEL_ZOO_STUB: `bert-large-uncased` +* CM_BERT_PRUNE_CONSTRAINT: `0.5` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-bert-models/_cm.json)*** + * get,python3 + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_scipy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_cupy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_tqdm + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch_cuda + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_datasets + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_transformers + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_scikit-learn + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,git,repo,_repo.https://github.com/cknowledge/retraining-free-pruning + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + * get,ml-model,model,zoo,model-zoo,huggingface,_prune + * CM names: `--adr.['get-model']...` + - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo) + 1. 
***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-bert-models/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-bert-models/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-bert-models/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-bert-models/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-bert-models/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-bert-models/_cm.json) + +___ +### Script output +`cmr "prune bert-models bert-prune prune-bert-models [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/CM-interface-prototyping/test-mlperf-inference-retinanet.md b/docs/CM-interface-prototyping/test-mlperf-inference-retinanet.md new file mode 100644 index 000000000..76c94e570 --- /dev/null +++ b/docs/CM-interface-prototyping/test-mlperf-inference-retinanet.md @@ -0,0 +1,135 @@ +Automatically generated README for this automation recipe: **test-mlperf-inference-retinanet** + +Category: **CM interface prototyping** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=test-mlperf-inference-retinanet,1cedbc3b642a403a) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *test,mlperf-inference-win,retinanet,windows* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "test mlperf-inference-win retinanet windows" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=test,mlperf-inference-win,retinanet,windows` + +`cm run script --tags=test,mlperf-inference-win,retinanet,windows ` + +*or* + +`cmr "test mlperf-inference-win retinanet windows"` + +`cmr "test mlperf-inference-win retinanet windows " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'test,mlperf-inference-win,retinanet,windows', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="test,mlperf-inference-win,retinanet,windows"``` + +#### Run this script via Docker (beta) + +`cm docker script "test mlperf-inference-win retinanet windows" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet/_cm.json)*** + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_requests + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,loadgen + * CM names: `--adr.['loadgen', 'mlperf-inference-loadgen']...` + - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) + * mlperf,inference,source + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,dataset,open-images,original + - CM script: [get-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages) + * get,raw,ml-model,retinanet + - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet/run.sh) + 1. 
Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet/_cm.json) + +___ +### Script output +`cmr "test mlperf-inference-win retinanet windows " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/CUDA-automation/get-cuda-devices.md b/docs/CUDA-automation/get-cuda-devices.md new file mode 100644 index 000000000..931e10be8 --- /dev/null +++ b/docs/CUDA-automation/get-cuda-devices.md @@ -0,0 +1,122 @@ +Automatically generated README for this automation recipe: **get-cuda-devices** + +Category: **CUDA automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-cuda-devices,7a3ede4d3558427a) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda-devices)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,cuda-devices* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get cuda-devices" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,cuda-devices` + +`cm run script --tags=get,cuda-devices ` + +*or* + +`cmr "get cuda-devices"` + +`cmr "get cuda-devices " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,cuda-devices' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,cuda-devices"``` + +#### Run this script via Docker (beta) + +`cm docker script "get cuda-devices" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda-devices/_cm.json)*** + * get,cuda,_toolkit + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda-devices/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda-devices/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda-devices/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda-devices/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda-devices/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda-devices/_cm.json) + +___ +### Script output +`cmr "get cuda-devices " -j` +#### New environment keys (filter) + +* `CM_CUDA_DEVICE_*` +#### New environment keys auto-detected from customize diff --git a/docs/CUDA-automation/get-cuda.md b/docs/CUDA-automation/get-cuda.md new file mode 100644 index 000000000..d0b49f561 --- /dev/null +++ b/docs/CUDA-automation/get-cuda.md @@ -0,0 +1,230 @@ +Automatically generated README for this automation recipe: **get-cuda** + +Category: **CUDA automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-cuda,46d133d9ef92422d) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- + +# System dependencies + +* Download [CUDA toolkit](https://developer.nvidia.com/cuda-toolkit). +* Download [cuDNN](https://developer.nvidia.com/rdp/cudnn-download). +* Download [TensorRT](https://developer.nvidia.com/nvidia-tensorrt-8x-download). + + + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,cuda,cuda-compiler,cuda-lib,toolkit,lib,nvcc,get-nvcc,get-cuda* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get cuda cuda-compiler cuda-lib toolkit lib nvcc get-nvcc get-cuda" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,cuda,cuda-compiler,cuda-lib,toolkit,lib,nvcc,get-nvcc,get-cuda` + +`cm run script --tags=get,cuda,cuda-compiler,cuda-lib,toolkit,lib,nvcc,get-nvcc,get-cuda[,variations] [--input_flags]` + +*or* + +`cmr "get cuda cuda-compiler cuda-lib toolkit lib nvcc get-nvcc get-cuda"` + +`cmr "get cuda cuda-compiler cuda-lib toolkit lib nvcc get-nvcc get-cuda [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,cuda,cuda-compiler,cuda-lib,toolkit,lib,nvcc,get-nvcc,get-cuda' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,cuda,cuda-compiler,cuda-lib,toolkit,lib,nvcc,get-nvcc,get-cuda"``` + +#### Run this script via Docker (beta) + +`cm docker script "get cuda cuda-compiler cuda-lib toolkit lib nvcc get-nvcc get-cuda[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_cudnn` + - Environment variables: + - *CM_CUDA_NEEDS_CUDNN*: `yes` + - Workflow: + 1. ***Read "post_deps" on other CM scripts*** + * get,nvidia,cudnn + * CM names: `--adr.['cudnn']...` + - CM script: [get-cudnn](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cudnn) + * `_package-manager` + - Environment variables: + - *CM_CUDA_PACKAGE_MANAGER_INSTALL*: `yes` + - Workflow: + +
+ + + * Group "**installation-mode**" +
+ Click here to expand this section. + + * `_lib-only` + - Environment variables: + - *CM_CUDA_FULL_TOOLKIT_INSTALL*: `no` + - *CM_TMP_FILE_TO_CHECK_UNIX*: `libcudart.so` + - *CM_TMP_FILE_TO_CHECK_WINDOWS*: `libcudart.dll` + - Workflow: + * **`_toolkit`** (default) + - Environment variables: + - *CM_CUDA_FULL_TOOLKIT_INSTALL*: `yes` + - *CM_TMP_FILE_TO_CHECK_UNIX*: `nvcc` + - *CM_TMP_FILE_TO_CHECK_WINDOWS*: `nvcc.exe` + - Workflow: + +
+ + +#### Default variations + +`_toolkit` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--cudnn_tar_file=value` → `CM_CUDNN_TAR_FILE_PATH=value` +* `--cudnn_tar_path=value` → `CM_CUDNN_TAR_FILE_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "cudnn_tar_file":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_CUDA_PATH_LIB_CUDNN_EXISTS: `no` +* CM_REQUIRE_INSTALL: `no` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,cl + * Enable this dependency only if all ENV vars are set:
+`{'CM_CUDA_FULL_TOOLKIT_INSTALL': ['yes'], 'CM_HOST_OS_TYPE': ['windows']}` + * CM names: `--adr.['compiler']...` + - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda/_cm.json)*** + * install,cuda,prebuilt + * Enable this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + * CM names: `--adr.['install-cuda-prebuilt']...` + - CM script: [install-cuda-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/install-cuda-prebuilt) + * get,generic-sys-util,_nvidia-cuda-toolkit + * Enable this dependency only if all ENV vars are set:
+`{'CM_CUDA_PACKAGE_MANAGER_INSTALL': ['yes']}` + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda/_cm.json) + +___ +### Script output +`cmr "get cuda cuda-compiler cuda-lib toolkit lib nvcc get-nvcc get-cuda [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `+ LDFLAGS` +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+DYLD_FALLBACK_LIBRARY_PATH` +* `+LD_LIBRARY_PATH` +* `+PATH` +* `CM_CUDA_*` +* `CM_NVCC_*` +* `CUDA_HOME` +* `CUDA_PATH` +#### New environment keys auto-detected from customize + +* `CM_CUDA_CACHE_TAGS` +* `CM_CUDA_FULL_TOOLKIT_INSTALL` +* `CM_CUDA_INSTALLED_PATH` +* `CM_CUDA_PATH_BIN` +* `CM_CUDA_PATH_INCLUDE` +* `CM_CUDA_PATH_LIB` +* `CM_CUDA_VERSION` +* `CM_CUDA_VERSION_STRING` +* `CM_NVCC_BIN` \ No newline at end of file diff --git a/docs/CUDA-automation/get-cudnn.md b/docs/CUDA-automation/get-cudnn.md new file mode 100644 index 000000000..224fb2641 --- /dev/null +++ b/docs/CUDA-automation/get-cudnn.md @@ -0,0 +1,167 @@ +Automatically generated README for this automation recipe: **get-cudnn** + +Category: **CUDA automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-cudnn,d73ee19baee14df8) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cudnn)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,cudnn,nvidia* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get cudnn nvidia" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,cudnn,nvidia` + +`cm run script --tags=get,cudnn,nvidia [--input_flags]` + +*or* + +`cmr "get cudnn nvidia"` + +`cmr "get cudnn nvidia " [--input_flags]` + + + +#### Input Flags + +* --**input**=Full path to the installed cuDNN library +* --**tar_file**=Full path to the cuDNN Tar file downloaded from Nvidia website (https://developer.nvidia.com/cudnn) + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "input":...} +``` +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,cudnn,nvidia' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,cudnn,nvidia"``` + +#### Run this script via Docker (beta) + +`cm docker script "get cudnn nvidia" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--input=value` → `CM_INPUT=value` +* `--tar_file=value` → `CM_CUDNN_TAR_FILE_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "input":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_SUDO: `sudo` +* CM_INPUT: `` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cudnn/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,cuda + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_CUDA_PATH_LIB': ['on'], 'CM_CUDA_PATH_INCLUDE': ['on']}` + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cudnn/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cudnn/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cudnn/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cudnn/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cudnn/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cudnn/_cm.json) + +___ +### Script output +`cmr "get cudnn nvidia " [--input_flags] -j` +#### New environment keys (filter) + +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+DYLD_FALLBACK_LIBRARY_PATH` +* `+LD_LIBRARY_PATH` +* `+PATH` +* `CM_CUDA_PATH_INCLUDE_CUDNN` +* `CM_CUDA_PATH_LIB_CUDNN` +* `CM_CUDA_PATH_LIB_CUDNN_EXISTS` +* `CM_CUDNN_*` +#### New environment keys auto-detected from customize + +* `CM_CUDA_PATH_INCLUDE_CUDNN` +* `CM_CUDA_PATH_LIB_CUDNN` +* `CM_CUDA_PATH_LIB_CUDNN_EXISTS` +* `CM_CUDNN_VERSION` \ No newline at end of file diff --git a/docs/CUDA-automation/get-tensorrt.md b/docs/CUDA-automation/get-tensorrt.md new file mode 100644 index 000000000..07153e153 --- /dev/null +++ b/docs/CUDA-automation/get-tensorrt.md @@ -0,0 +1,176 @@ +Automatically generated README for this automation recipe: **get-tensorrt** + +Category: **CUDA automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and 
Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-tensorrt,2a84ca505e4c408d) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tensorrt)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,tensorrt,nvidia* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get tensorrt nvidia" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,tensorrt,nvidia` + +`cm run script --tags=get,tensorrt,nvidia[,variations] [--input_flags]` + +*or* + +`cmr "get tensorrt nvidia"` + +`cmr "get tensorrt nvidia [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + + +#### Input Flags + +* --**input**=Full path to the installed TensorRT library (nvinfer) +* --**tar_file**=Full path to the TensorRT Tar file downloaded from the Nvidia website 
(https://developer.nvidia.com/tensorrt) + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "input":...} +``` +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,tensorrt,nvidia' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,tensorrt,nvidia"``` + +#### Run this script via Docker (beta) + +`cm docker script "get tensorrt nvidia[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_dev` + - Environment variables: + - *CM_TENSORRT_REQUIRE_DEV*: `yes` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--input=value` → `CM_INPUT=value` +* `--tar_file=value` → `CM_TENSORRT_TAR_FILE_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "input":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tensorrt/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tensorrt/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tensorrt/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tensorrt/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tensorrt/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tensorrt/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tensorrt/_cm.json) + +___ +### Script output +`cmr "get tensorrt nvidia [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `+ LDFLAGS` +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+DYLD_FALLBACK_LIBRARY_PATH` +* `+LD_LIBRARY_PATH` +* `+PATH` +* `CM_TENSORRT_*` +#### New environment keys auto-detected from customize + +* `CM_TENSORRT_INSTALL_PATH` +* `CM_TENSORRT_LIB_PATH` +* `CM_TENSORRT_VERSION` \ No newline at end of file diff --git a/docs/CUDA-automation/install-cuda-package-manager.md b/docs/CUDA-automation/install-cuda-package-manager.md new file mode 100644 index 000000000..e08286c94 --- /dev/null +++ b/docs/CUDA-automation/install-cuda-package-manager.md @@ -0,0 +1,124 @@ +Automatically generated README for this automation recipe: **install-cuda-package-manager** + +Category: **CUDA automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-cuda-package-manager,c1afdff8542f45be) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-package-manager)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,package-manager,cuda,package-manager-cuda,install-pm-cuda* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install package-manager cuda package-manager-cuda install-pm-cuda" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,package-manager,cuda,package-manager-cuda,install-pm-cuda` + +`cm run script --tags=install,package-manager,cuda,package-manager-cuda,install-pm-cuda ` + +*or* + +`cmr "install package-manager cuda package-manager-cuda install-pm-cuda"` + +`cmr "install package-manager cuda package-manager-cuda install-pm-cuda " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'install,package-manager,cuda,package-manager-cuda,install-pm-cuda' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,package-manager,cuda,package-manager-cuda,install-pm-cuda"``` + +#### Run this script via Docker (beta) + +`cm docker script "install package-manager cuda package-manager-cuda install-pm-cuda" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-package-manager/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-package-manager/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-package-manager/_cm.json) + 1. ***Run native script if exists*** + * [run-ubuntu.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-package-manager/run-ubuntu.sh) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-package-manager/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-package-manager/_cm.json) + 1. Run "postrocess" function from customize.py + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-package-manager/_cm.json)*** + * get,cuda + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + +___ +### Script output +`cmr "install package-manager cuda package-manager-cuda install-pm-cuda " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/CUDA-automation/install-cuda-prebuilt.md b/docs/CUDA-automation/install-cuda-prebuilt.md new file mode 100644 index 000000000..16a3fe50b --- /dev/null +++ b/docs/CUDA-automation/install-cuda-prebuilt.md @@ -0,0 +1,180 @@ +Automatically generated README for this automation recipe: **install-cuda-prebuilt** + +Category: **CUDA automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-cuda-prebuilt,14eadcd42ba340c3) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-prebuilt)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,prebuilt,cuda,prebuilt-cuda,install-prebuilt-cuda* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install prebuilt cuda prebuilt-cuda install-prebuilt-cuda" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,prebuilt,cuda,prebuilt-cuda,install-prebuilt-cuda` + +`cm run script --tags=install,prebuilt,cuda,prebuilt-cuda,install-prebuilt-cuda[,variations] [--input_flags]` + +*or* + +`cmr "install prebuilt cuda prebuilt-cuda install-prebuilt-cuda"` + +`cmr "install prebuilt cuda prebuilt-cuda install-prebuilt-cuda [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'install,prebuilt,cuda,prebuilt-cuda,install-prebuilt-cuda' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,prebuilt,cuda,prebuilt-cuda,install-prebuilt-cuda"``` + +#### Run this script via Docker (beta) + +`cm docker script "install prebuilt cuda prebuilt-cuda install-prebuilt-cuda[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**install-driver**" +
+ Click here to expand this section. + + * `_driver` + - Environment variables: + - *CM_CUDA_INSTALL_DRIVER*: `yes` + - Workflow: + * **`_no-driver`** (default) + - Environment variables: + - *CM_CUDA_INSTALL_DRIVER*: `no` + - Workflow: + +
+ + +#### Default variations + +`_no-driver` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--local_run_file_path=value` → `CUDA_RUN_FILE_LOCAL_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "local_run_file_path":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_SUDO: `sudo` + +
+ +#### Versions +Default version: `11.8.0` + +* `11.7.0` +* `11.8.0` +* `12.0.0` +* `12.1.1` +* `12.2.0` +* `12.3.2` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-prebuilt/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-prebuilt/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-prebuilt/_cm.json)*** + * download,file + * CM names: `--adr.['download-script']...` + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-prebuilt/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-prebuilt/_cm.json) + 1. Run "postrocess" function from customize.py + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-prebuilt/_cm.json)*** + * get,cuda + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + +___ +### Script output +`cmr "install prebuilt cuda prebuilt-cuda install-prebuilt-cuda [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_CUDA_*` +* `CM_NVCC_*` +#### New environment keys auto-detected from customize + +* `CM_CUDA_INSTALLED_PATH` +* `CM_NVCC_BIN_WITH_PATH` \ No newline at end of file diff --git a/docs/Cloud-automation/destroy-terraform.md b/docs/Cloud-automation/destroy-terraform.md new file mode 100644 index 000000000..514106a78 --- /dev/null +++ b/docs/Cloud-automation/destroy-terraform.md @@ -0,0 +1,121 @@ +Automatically generated README for this automation recipe: **destroy-terraform** + +Category: **Cloud automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=destroy-terraform,3463458d03054856) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *destroy,terraform,cmd* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "destroy terraform cmd" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=destroy,terraform,cmd` + +`cm run script --tags=destroy,terraform,cmd ` + +*or* + +`cmr "destroy terraform cmd"` + +`cmr "destroy terraform cmd " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'destroy,terraform,cmd' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="destroy,terraform,cmd"``` + +#### Run this script via Docker (beta) + +`cm docker script "destroy terraform cmd" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform/_cm.json)*** + * get,terraform + * CM names: `--adr.['terraform']...` + - CM script: [get-terraform](https://github.com/mlcommons/cm4mlops/tree/master/script/get-terraform) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform/_cm.json) + +___ +### Script output +`cmr "destroy terraform cmd " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Cloud-automation/get-aws-cli.md b/docs/Cloud-automation/get-aws-cli.md new file mode 100644 index 000000000..52bc80bfa --- /dev/null +++ b/docs/Cloud-automation/get-aws-cli.md @@ -0,0 +1,125 @@ +Automatically generated README for this automation recipe: **get-aws-cli** + +Category: **Cloud automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-aws-cli,dad67944229942a3) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aws-cli)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,aws-cli,aws,cli* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get aws-cli aws cli" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,aws-cli,aws,cli` + +`cm run script --tags=get,aws-cli,aws,cli ` + +*or* + +`cmr "get aws-cli aws cli"` + +`cmr "get aws-cli aws cli " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,aws-cli,aws,cli' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,aws-cli,aws,cli"``` + +#### Run this script via Docker (beta) + +`cm docker script "get aws-cli aws cli" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aws-cli/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aws-cli/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aws-cli/_cm.json)*** + * install,aws-cli + * Enable this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [install-aws-cli](https://github.com/mlcommons/cm4mlops/tree/master/script/install-aws-cli) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aws-cli/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aws-cli/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aws-cli/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aws-cli/_cm.json) + +___ +### Script output +`cmr "get aws-cli aws cli " -j` +#### New environment keys (filter) + +* `CM_AWS_*` +#### New environment keys auto-detected from customize + +* `CM_AWS_CACHE_TAGS` +* `CM_AWS_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/Cloud-automation/get-terraform.md b/docs/Cloud-automation/get-terraform.md new file mode 100644 index 000000000..22b001ca8 --- /dev/null +++ b/docs/Cloud-automation/get-terraform.md @@ -0,0 +1,126 @@ +Automatically generated README for this automation recipe: **get-terraform** + +Category: **Cloud automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-terraform,66b33c38a4d7461e) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-terraform)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above 
meta description): *get,terraform,get-terraform* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get terraform get-terraform" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,terraform,get-terraform` + +`cm run script --tags=get,terraform,get-terraform ` + +*or* + +`cmr "get terraform get-terraform"` + +`cmr "get terraform get-terraform " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,terraform,get-terraform' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,terraform,get-terraform"``` + +#### Run this script via Docker (beta) + +`cm docker script "get terraform get-terraform" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-terraform/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-terraform/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-terraform/_cm.json)*** + * install,terraform + * Enable this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [install-terraform-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-terraform-from-src) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-terraform/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-terraform/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-terraform/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-terraform/_cm.json) + +___ +### Script output +`cmr "get terraform get-terraform " -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_TERRAFORM_*` +#### New environment keys auto-detected from customize + +* `CM_TERRAFORM_CACHE_TAGS` +* `CM_TERRAFORM_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/Cloud-automation/install-aws-cli.md b/docs/Cloud-automation/install-aws-cli.md new file mode 100644 index 000000000..d142c7c07 --- /dev/null +++ b/docs/Cloud-automation/install-aws-cli.md @@ -0,0 +1,123 @@ +Automatically generated README for this automation recipe: **install-aws-cli** + +Category: **Cloud automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-aws-cli,4d3efd333c3f4d36) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-aws-cli)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in 
above meta description): *install,script,aws-cli,aws,cli* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install script aws-cli aws cli" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,script,aws-cli,aws,cli` + +`cm run script --tags=install,script,aws-cli,aws,cli ` + +*or* + +`cmr "install script aws-cli aws cli"` + +`cmr "install script aws-cli aws cli " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'install,script,aws-cli,aws,cli' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,script,aws-cli,aws,cli"``` + +#### Run this script via Docker (beta) + +`cm docker script "install script aws-cli aws cli" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-aws-cli/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-aws-cli/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-aws-cli/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-aws-cli/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-aws-cli/_cm.json) + 1. Run "postprocess" function from customize.py + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-aws-cli/_cm.json)*** + * get,aws-cli + * Skip this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [get-aws-cli](https://github.com/mlcommons/cm4mlops/tree/master/script/get-aws-cli) + +___ +### Script output +`cmr "install script aws-cli aws cli " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Cloud-automation/install-terraform-from-src.md b/docs/Cloud-automation/install-terraform-from-src.md new file mode 100644 index 000000000..0d7c83531 --- /dev/null +++ b/docs/Cloud-automation/install-terraform-from-src.md @@ -0,0 +1,130 @@ +Automatically generated README for this automation recipe: **install-terraform-from-src** + +Category: **Cloud automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-terraform-from-src,d79d47a074f34428) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-terraform-from-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,terraform,from-src* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install terraform from-src" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,terraform,from-src` + +`cm run script --tags=install,terraform,from-src ` + +*or* + +`cmr "install terraform from-src"` + +`cmr "install terraform from-src " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'install,terraform,from-src' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,terraform,from-src"``` + +#### Run this script via Docker (beta) + +`cm docker script "install terraform from-src" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `main` + +* `main` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-terraform-from-src/_cm.json)*** + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,tool,go + - CM script: [get-go](https://github.com/mlcommons/cm4mlops/tree/master/script/get-go) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-terraform-from-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-terraform-from-src/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-terraform-from-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-terraform-from-src/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-terraform-from-src/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-terraform-from-src/_cm.json) + +___ +### Script output +`cmr "install terraform from-src " -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_TERRAFORM_*` +#### New environment keys auto-detected from customize + +* `CM_TERRAFORM_BIN_WITH_PATH` +* `CM_TERRAFORM_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/Cloud-automation/run-terraform.md b/docs/Cloud-automation/run-terraform.md new file mode 100644 index 000000000..7e5699478 --- /dev/null +++ b/docs/Cloud-automation/run-terraform.md @@ -0,0 +1,481 @@ +Automatically generated README for this automation recipe: **run-terraform** + +Category: **Cloud automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-terraform,ec344bd44af144d7) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- + +## Setup for Google Cloud Instances +``` +sudo snap install google-cloud-cli --classic +gcloud auth application-default login +``` + +The above two commands will install google-cloud-cli and authorizes the user to access it. Once done, you can start creating gcp instance using CM commands like below. To destroy an instance just repeat the same command with `--destroy` option. 
+ +``` +cm run script --tags=run,terraform,_gcp,_gcp_project.mlperf-inference-tests --cminit +``` +Here, `mlperf-inference-tests` is the name of the google project as created in [Google cloud console](https://console.cloud.google.com/apis/dashboard) + + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-terraform)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *run,terraform* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run terraform" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run,terraform` + +`cm run script --tags=run,terraform[,variations] [--input_flags]` + +*or* + +`cmr "run terraform"` + +`cmr "run terraform [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'run,terraform' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,terraform"``` + +#### Run this script via Docker (beta) + +`cm docker script "run terraform[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_amazon-linux-2-kernel.#` + - Environment variables: + - *TF_VAR_INSTANCE_IMAGE_OS*: `amazon-linux-2-kernel.#` + - Workflow: + * `_graviton` + - Environment variables: + - *CM_TERRAFORM_AWS_GRAVITON_INSTANCE*: `yes` + - Workflow: + * `_inferentia` + - Environment variables: + - *CM_TERRAFORM_AWS_INFERENTIA_INSTANCE*: `yes` + - Workflow: + * `_inferentia,amazon-linux-2-kernel.510` + - Workflow: + * `_rhel.#` + - Environment variables: + - *TF_VAR_INSTANCE_IMAGE_OS*: `rhel.#` + - Workflow: + * `_ubuntu.#` + - Environment variables: + - *TF_VAR_INSTANCE_IMAGE_OS*: `ubuntu.#` + - Workflow: + +
+ + + * Group "**aws-instance-image**" +
+ Click here to expand this section. + + * `_amazon-linux-2-kernel.510,arm64,us-west-2` + - Environment variables: + - *TF_VAR_INSTANCE_IMAGE*: `ami-0f1a5f5ada0e7da53` + - Workflow: + * `_aws_instance_image.#` + - Environment variables: + - *TF_VAR_INSTANCE_IMAGE*: `#` + - Workflow: + * `_aws_instance_image.ami-0735c191cf914754d` + - Environment variables: + - *TF_VAR_INSTANCE_IMAGE*: `ami-0735c191cf914754d` + - Workflow: + * `_aws_instance_image.ami-0a0d8589b597d65b3` + - Environment variables: + - *TF_VAR_INSTANCE_IMAGE*: `ami-0a0d8589b597d65b3` + - Workflow: + * `_rhel.9,x86,us-west-2` + - Environment variables: + - *TF_VAR_INSTANCE_IMAGE*: `ami-0dda7e535b65b6469` + - Workflow: + * `_ubuntu.2204,arm64,us-west-2` + - Environment variables: + - *TF_VAR_INSTANCE_IMAGE*: `ami-079f51a7bcca65b92` + - Workflow: + * `_ubuntu.2204,x86,us-west-2` + - Environment variables: + - *TF_VAR_INSTANCE_IMAGE*: `ami-0735c191cf914754d` + - Workflow: + +
+ + + * Group "**aws-instance-type**" +
+ Click here to expand this section. + + * `_a1.2xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `a1.2xlarge` + - Workflow: + * `_a1.metal` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `a1.metal` + - Workflow: + * `_a1.xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `a1.xlarge` + - Workflow: + * `_aws_instance_type.#` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `#` + - Workflow: + * `_c5.12xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `c5.12xlarge` + - Workflow: + * `_c5.4xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `c5.4xlarge` + - Workflow: + * `_c5d.9xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `c5d.9xlarge` + - Workflow: + * `_g4dn.xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `g4dn.xlarge` + - Workflow: + * `_inf1.2xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `inf1.2xlarge` + - Workflow: + * `_inf1.xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `inf1.xlarge` + - Workflow: + * `_inf2.8xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `inf2.8xlarge` + - Workflow: + * `_inf2.xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `inf2.xlarge` + - Workflow: + * `_m7g.2xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `m7g.2xlarge` + - Workflow: + * `_m7g.xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `m7g.xlarge` + - Workflow: + * `_t2.#` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `t2.#` + - Workflow: + * `_t2.2xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `t2.2xlarge` + - Workflow: + * `_t2.large` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `t2.large` + - Workflow: + * `_t2.medium` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `t2.medium` + - Workflow: + * `_t2.micro` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `t2.micro` + - Workflow: + * `_t2.nano` + - Environment variables: + - 
*TF_VAR_INSTANCE_TYPE*: `t2.nano` + - Workflow: + * `_t2.small` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `t2.small` + - Workflow: + * `_t2.xlarge` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `t2.xlarge` + - Workflow: + +
+ + + * Group "**cloud-provider**" +
+ Click here to expand this section. + + * **`_aws`** (default) + - Environment variables: + - *CM_TERRAFORM_CONFIG_DIR_NAME*: `aws` + - Workflow: + * `_gcp` + - Environment variables: + - *CM_TERRAFORM_CONFIG_DIR_NAME*: `gcp` + - Workflow: + +
+ + + * Group "**gcp-instance-image**" +
+ Click here to expand this section. + + * `_debian-cloud/debian-11` + - Environment variables: + - *TF_VAR_INSTANCE_IMAGE*: `debian-cloud/debian-11` + - Workflow: + * `_gcp_instance_image.#` + - Environment variables: + - *TF_VAR_INSTANCE_IMAGE*: `#` + - Workflow: + * `_ubuntu-2204-jammy-v20230114` + - Environment variables: + - *TF_VAR_INSTANCE_IMAGE*: `ubuntu-2204-jammy-v20230114` + - Workflow: + +
+ + + * Group "**gcp-instance-type**" +
+ Click here to expand this section. + + * `_f1-micro` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `f1-micro` + - Workflow: + * `_gcp_instance_type.#` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `#` + - Workflow: + * `_n1-highmem.#` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `n1-highmem-#` + - Workflow: + * `_n1-standard.#` + - Environment variables: + - *TF_VAR_INSTANCE_TYPE*: `n1-highmem-#` + - Workflow: + +
+ + + * Group "**gcp-project**" +
+ Click here to expand this section. + + * `_gcp_project.#` + - Environment variables: + - *TF_VAR_GCP_PROJECT*: `#` + - Workflow: + +
+ + + * Group "**instance-name**" +
+ Click here to expand this section. + + * `_instance_name.#` + - Environment variables: + - *TF_VAR_INSTANCE_NAME*: `#` + - Workflow: + +
+ + + * Group "**platform**" +
+ Click here to expand this section. + + * `_arm64` + - Environment variables: + - *CM_INSTANCE_PLATFORM*: `arm64` + - Workflow: + * **`_x86`** (default) + - Environment variables: + - *CM_INSTANCE_PLATFORM*: `x86` + - Workflow: + +
+ + + * Group "**region**" +
+ Click here to expand this section. + + * `_region.#` + - Environment variables: + - *TF_VAR_INSTANCE_REGION*: `#` + - Workflow: + * `_us-west-2` + - Environment variables: + - *TF_VAR_INSTANCE_REGION*: `us-west-2` + - Workflow: + +
+ + + * Group "**storage-size**" +
+ Click here to expand this section. + + * `_storage_size.#` + - Environment variables: + - *TF_VAR_DISK_GBS*: `#` + - Workflow: + * `_storage_size.8` + - Environment variables: + - *TF_VAR_DISK_GBS*: `8` + - Workflow: + +
+ + + * Group "**zone**" +
+ Click here to expand this section. + + * `_zone.#` + - Environment variables: + - *TF_VAR_INSTANCE_ZONE*: `#` + - Workflow: + +
+ + +#### Default variations + +`_aws,_x86` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--cminit=value` → `CM_TERRAFORM_CM_INIT=value` +* `--destroy=value` → `CM_DESTROY_TERRAFORM=value` +* `--gcp_credentials_json_file=value` → `CM_GCP_CREDENTIALS_JSON_PATH=value` +* `--key_file=value` → `CM_SSH_KEY_FILE=value` +* `--run_cmds=value` → `CM_TERRAFORM_RUN_COMMANDS=value` +* `--ssh_key_file=value` → `CM_SSH_KEY_FILE=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "cminit":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* TF_VAR_SECURITY_GROUP_ID: `sg-0783752c97d2e011d` +* TF_VAR_CPU_COUNT: `1` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-terraform/_cm.json)*** + * get,terraform + - CM script: [get-terraform](https://github.com/mlcommons/cm4mlops/tree/master/script/get-terraform) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-terraform/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-terraform/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-terraform/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-terraform/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-terraform/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-terraform/_cm.json)*** + * destroy,terraform + * Enable this dependency only if all ENV vars are set:
+`{'CM_DESTROY_TERRAFORM': ['on']}` + * CM names: `--adr.['destroy-cmd']...` + - CM script: [destroy-terraform](https://github.com/mlcommons/cm4mlops/tree/master/script/destroy-terraform) + +___ +### Script output +`cmr "run terraform [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_TERRAFORM_CONFIG_DIR` +* `CM_TERRAFORM_RUN_DIR` +#### New environment keys auto-detected from customize + +* `CM_TERRAFORM_CONFIG_DIR` +* `CM_TERRAFORM_RUN_DIR` \ No newline at end of file diff --git a/docs/Collective-benchmarking/launch-benchmark.md b/docs/Collective-benchmarking/launch-benchmark.md new file mode 100644 index 000000000..84a904b86 --- /dev/null +++ b/docs/Collective-benchmarking/launch-benchmark.md @@ -0,0 +1,116 @@ +Automatically generated README for this automation recipe: **launch-benchmark** + +Category: **Collective benchmarking** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=launch-benchmark,5dc7662804bc4cad) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/launch-benchmark)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *launch,benchmark* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "launch benchmark" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=launch,benchmark` + +`cm run script --tags=launch,benchmark ` + +*or* + +`cmr "launch benchmark"` + +`cmr "launch benchmark " ` + + +#### Run this script from Python + +
+
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'launch,benchmark',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="launch,benchmark"``` + +#### Run this script via Docker (beta) + +`cm docker script "launch benchmark" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/launch-benchmark/_cm.yaml) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/launch-benchmark/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/launch-benchmark/_cm.yaml) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/launch-benchmark/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/launch-benchmark/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/launch-benchmark/_cm.yaml) + +___ +### Script output +`cmr "launch benchmark " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/get-aocl.md b/docs/Compiler-automation/get-aocl.md new file mode 100644 index 000000000..efb20e415 --- /dev/null +++ b/docs/Compiler-automation/get-aocl.md @@ -0,0 +1,137 @@ +Automatically generated README for this automation recipe: **get-aocl** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-aocl,a65d3088f57d413d) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aocl)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,lib,aocl,amd-optimized,amd* +* Output cached? *true* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get lib aocl amd-optimized amd" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,lib,aocl,amd-optimized,amd` + +`cm run script --tags=get,lib,aocl,amd-optimized,amd ` + +*or* + +`cmr "get lib aocl amd-optimized amd"` + +`cmr "get lib aocl amd-optimized amd " ` + + +#### Run this script from Python + +
+
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,lib,aocl,amd-optimized,amd',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,lib,aocl,amd-optimized,amd"``` + +#### Run this script via Docker (beta) + +`cm docker script "get lib aocl amd-optimized amd" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `4.0` + +* `4.0` +* `master` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aocl/_cm.json)*** + * get,generic,sys-util,_libmpfr-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic-python-lib,_scons + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,git,_repo.https://github.com/amd/aocl-libm-ose + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aocl/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aocl/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aocl/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aocl/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aocl/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aocl/_cm.json) + +___ +### Script output +`cmr "get lib aocl amd-optimized amd " -j` +#### New environment keys (filter) + +* `+LD_LIBRARY_PATH` +* `+LIBRARY_PATH` +* `CM_AOCL_BUILD_PATH` +* `CM_AOCL_LIB_PATH` +* `CM_AOCL_SRC_PATH` +#### New environment keys auto-detected from customize + +* `CM_AOCL_BUILD_PATH` +* `CM_AOCL_LIB_PATH` +* `CM_AOCL_SRC_PATH` \ No newline at end of file diff --git a/docs/Compiler-automation/get-cl.md b/docs/Compiler-automation/get-cl.md new file mode 100644 index 000000000..6168f1345 --- /dev/null +++ b/docs/Compiler-automation/get-cl.md @@ -0,0 +1,138 @@ +Automatically generated README for this automation recipe: **get-cl** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-cl,7dbb770faff947c0) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cl)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,cl,compiler,c-compiler,cpp-compiler,get-cl* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get cl compiler c-compiler cpp-compiler get-cl" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,cl,compiler,c-compiler,cpp-compiler,get-cl` + +`cm run script --tags=get,cl,compiler,c-compiler,cpp-compiler,get-cl ` + +*or* + +`cmr "get cl compiler c-compiler cpp-compiler get-cl"` + +`cmr "get cl compiler c-compiler cpp-compiler get-cl " ` + + +#### Run this script from Python + +
+
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,cl,compiler,c-compiler,cpp-compiler,get-cl',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,cl,compiler,c-compiler,cpp-compiler,get-cl"``` + +#### Run this script via Docker (beta) + +`cm docker script "get cl compiler c-compiler cpp-compiler get-cl" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cl/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cl/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cl/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cl/run.bat) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cl/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cl/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cl/_cm.json) + +___ +### Script output +`cmr "get cl compiler c-compiler cpp-compiler get-cl " -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_CL_*` +* `CM_COMPILER_*` +* `CM_CXX_COMPILER_*` +* `CM_C_COMPILER_*` +* `CM_LINKER_*` +#### New environment keys auto-detected from customize + +* `CM_CL_BIN` +* `CM_CL_BIN_WITH_PATH` +* `CM_CL_CACHE_TAGS` +* `CM_COMPILER_CACHE_TAGS` +* `CM_COMPILER_FAMILY` +* `CM_COMPILER_VERSION` +* `CM_CXX_COMPILER_BIN` +* `CM_CXX_COMPILER_FLAG_OUTPUT` +* `CM_CXX_COMPILER_FLAG_VERSION` +* `CM_CXX_COMPILER_WITH_PATH` +* `CM_C_COMPILER_BIN` +* `CM_C_COMPILER_FLAG_OUTPUT` +* `CM_C_COMPILER_FLAG_VERSION` +* `CM_C_COMPILER_WITH_PATH` \ No newline at end of file diff --git a/docs/Compiler-automation/get-compiler-flags.md b/docs/Compiler-automation/get-compiler-flags.md new file mode 100644 index 000000000..b953d6388 --- /dev/null +++ b/docs/Compiler-automation/get-compiler-flags.md @@ -0,0 +1,130 @@ +Automatically generated README for this automation recipe: **get-compiler-flags** + +Category: **Compiler automation** + 
+License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-compiler-flags,31be8b74a69742f8) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-flags)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,compiler-flags* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get compiler-flags" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,compiler-flags` + +`cm run script --tags=get,compiler-flags ` + +*or* + +`cmr "get compiler-flags"` + +`cmr "get compiler-flags " ` + + +#### Run this script from Python + +
+
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,compiler-flags',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,compiler-flags"``` + +#### Run this script via Docker (beta) + +`cm docker script "get compiler-flags" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+
+___
+### Dependencies on other CM scripts
+
+
+  1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-flags/_cm.json)***
+     * detect,cpu
+       - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu)
+     * get,compiler
+       * Skip this dependency only if all ENV vars are set:
+`{'CM_C_COMPILER_BIN': ['on']}` + * CM names: `--adr.['compiler']...` + - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-flags/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-flags/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-flags/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-flags/_cm.json) + +___ +### Script output +`cmr "get compiler-flags " -j` +#### New environment keys (filter) + +* `+ CFLAGS` +* `+ CXXFLAGS` +* `+ FFLAGS` +* `+ LDFLAGS` +* `+CM_HOST_OS_DEFAULT_INCLUDE_PATH` +#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/get-compiler-rust.md b/docs/Compiler-automation/get-compiler-rust.md new file mode 100644 index 000000000..f0692f37a --- /dev/null +++ b/docs/Compiler-automation/get-compiler-rust.md @@ -0,0 +1,120 @@ +Automatically generated README for this automation recipe: **get-compiler-rust** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-compiler-rust,97ffbd9e537b4b59) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-rust)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *get,rust-compiler* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get rust-compiler" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,rust-compiler` + +`cm run script --tags=get,rust-compiler ` + +*or* + +`cmr "get rust-compiler"` + +`cmr "get rust-compiler " ` + + +#### Run this script from Python + +
+
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,rust-compiler',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,rust-compiler"``` + +#### Run this script via Docker (beta) + +`cm docker script "get rust-compiler" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-rust/_cm.yaml)*** + * get,python3 + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-rust/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-rust/_cm.yaml) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-rust/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-rust/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-rust/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-rust/_cm.yaml) + +___ +### Script output +`cmr "get rust-compiler " -j` +#### New environment keys (filter) + +* `+PATH` +#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/get-gcc.md b/docs/Compiler-automation/get-gcc.md new file mode 100644 index 000000000..66bb2f11f --- /dev/null +++ b/docs/Compiler-automation/get-gcc.md @@ -0,0 +1,154 @@ +Automatically generated README for this automation recipe: **get-gcc** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-gcc,dbf4ab5cbed74372) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,gcc,compiler,c-compiler,cpp-compiler,get-gcc* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get gcc compiler c-compiler cpp-compiler get-gcc" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,gcc,compiler,c-compiler,cpp-compiler,get-gcc` + +`cm run script --tags=get,gcc,compiler,c-compiler,cpp-compiler,get-gcc ` + +*or* + +`cmr "get gcc compiler c-compiler cpp-compiler get-gcc"` + +`cmr "get gcc compiler c-compiler cpp-compiler get-gcc " ` + + +#### Run this script from Python + +
+
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,gcc,compiler,c-compiler,cpp-compiler,get-gcc',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,gcc,compiler,c-compiler,cpp-compiler,get-gcc"``` + +#### Run this script via Docker (beta) + +`cm docker script "get gcc compiler c-compiler cpp-compiler get-gcc" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc/customize.py)*** + 1. 
***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc/_cm.json)*** + * get,compiler-flags + - CM script: [get-compiler-flags](https://github.com/mlcommons/cm4mlops/tree/master/script/get-compiler-flags) + +___ +### Script output +`cmr "get gcc compiler c-compiler cpp-compiler get-gcc " -j` +#### New environment keys (filter) + +* `+ CFLAGS` +* `+ CXXFLAGS` +* `+ FFLAGS` +* `+ LDFLAGS` +* `+CM_HOST_OS_DEFAULT_INCLUDE_PATH` +* `+PATH` +* `CM_COMPILER_*` +* `CM_CXX_COMPILER_*` +* `CM_C_COMPILER_*` +* `CM_GCC_*` +* `CM_LINKER_*` +#### New environment keys auto-detected from customize + +* `CM_COMPILER_CACHE_TAGS` +* `CM_COMPILER_FAMILY` +* `CM_COMPILER_FLAGS_DEBUG` +* `CM_COMPILER_FLAGS_DEFAULT` +* `CM_COMPILER_FLAGS_FAST` +* `CM_COMPILER_VERSION` +* `CM_CXX_COMPILER_BIN` +* `CM_CXX_COMPILER_FLAG_OUTPUT` +* `CM_CXX_COMPILER_FLAG_VERSION` +* `CM_CXX_COMPILER_WITH_PATH` +* `CM_C_COMPILER_BIN` +* `CM_C_COMPILER_FLAG_OUTPUT` +* `CM_C_COMPILER_FLAG_VERSION` +* `CM_C_COMPILER_WITH_PATH` +* `CM_GCC_BIN` +* `CM_GCC_CACHE_TAGS` +* `CM_GCC_INSTALLED_PATH` +* `CM_LINKER_FLAGS_DEBUG` +* `CM_LINKER_FLAGS_DEFAULT` +* `CM_LINKER_FLAGS_FAST` \ No newline at end of file diff --git a/docs/Compiler-automation/get-go.md b/docs/Compiler-automation/get-go.md new file mode 100644 index 000000000..963eca057 --- /dev/null +++ b/docs/Compiler-automation/get-go.md @@ -0,0 +1,126 @@ +Automatically generated README for this automation recipe: **get-go** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-go,ab42647a96724a25) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-go)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,tool,go,get-go* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get tool go get-go" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,tool,go,get-go` + +`cm run script --tags=get,tool,go,get-go ` + +*or* + +`cmr "get tool go get-go"` + +`cmr "get tool go get-go " ` + + +#### Run this script from Python + +
+
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,tool,go,get-go',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,tool,go,get-go"``` + +#### Run this script via Docker (beta) + +`cm docker script "get tool go get-go" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-go/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-go/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-go/_cm.json)*** + * install,go + * Enable this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - *Warning: no scripts found* + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-go/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-go/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-go/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-go/_cm.json) + +___ +### Script output +`cmr "get tool go get-go " -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_GO_*` +#### New environment keys auto-detected from customize + +* `CM_GO_CACHE_TAGS` +* `CM_GO_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/Compiler-automation/get-llvm.md b/docs/Compiler-automation/get-llvm.md new file mode 100644 index 000000000..8615f3831 --- /dev/null +++ b/docs/Compiler-automation/get-llvm.md @@ -0,0 +1,175 @@ +Automatically generated README for this automation recipe: **get-llvm** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-llvm,99832a103ed04eb8) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,llvm,compiler,c-compiler,cpp-compiler,get-llvm* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get llvm compiler c-compiler cpp-compiler get-llvm" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,llvm,compiler,c-compiler,cpp-compiler,get-llvm` + +`cm run script --tags=get,llvm,compiler,c-compiler,cpp-compiler,get-llvm[,variations] ` + +*or* + +`cmr "get llvm compiler c-compiler cpp-compiler get-llvm"` + +`cmr "get llvm compiler c-compiler cpp-compiler get-llvm [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,llvm,compiler,c-compiler,cpp-compiler,get-llvm',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,llvm,compiler,c-compiler,cpp-compiler,get-llvm"``` + +#### Run this script via Docker (beta) + +`cm docker script "get llvm compiler c-compiler cpp-compiler get-llvm[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_from-prebuilt` + - Workflow: + * `_from-src` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm/_cm.json)*** + * install,llvm + * Enable this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + * CM names: `--adr.llvm-install...` + - CM script: [install-llvm-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-prebuilt) + - CM script: [install-llvm-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-src) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm/_cm.json)*** + * get,compiler-flags + - CM script: [get-compiler-flags](https://github.com/mlcommons/cm4mlops/tree/master/script/get-compiler-flags) + +___ +### Script output +`cmr "get llvm compiler c-compiler cpp-compiler get-llvm [,variations]" -j` +#### New environment keys (filter) + +* `+ CFLAGS` +* `+ CXXFLAGS` +* `+ FFLAGS` +* `+ LDFLAGS` +* `+CM_HOST_OS_DEFAULT_INCLUDE_PATH` +* `+PATH` +* `CM_COMPILER_*` +* `CM_CXX_COMPILER_*` +* `CM_C_COMPILER_*` +* `CM_LINKER_*` +* `CM_LLVM_*` +#### New environment keys auto-detected from customize + +* `CM_COMPILER_CACHE_TAGS` +* `CM_COMPILER_FAMILY` +* `CM_COMPILER_FLAGS_DEBUG` +* `CM_COMPILER_FLAGS_DEFAULT` +* `CM_COMPILER_FLAGS_FAST` +* `CM_COMPILER_VERSION` +* `CM_CXX_COMPILER_BIN` +* `CM_CXX_COMPILER_FLAG_INCLUDE` +* `CM_CXX_COMPILER_FLAG_OUTPUT` +* `CM_CXX_COMPILER_FLAG_VERSION` +* `CM_CXX_COMPILER_WITH_PATH` +* `CM_C_COMPILER_BIN` +* `CM_C_COMPILER_FLAG_INCLUDE` +* `CM_C_COMPILER_FLAG_OUTPUT` +* `CM_C_COMPILER_FLAG_VERSION` +* `CM_C_COMPILER_WITH_PATH` +* `CM_LINKER_FLAGS_DEBUG` +* `CM_LINKER_FLAGS_DEFAULT` +* `CM_LINKER_FLAGS_FAST` +* 
`CM_LLVM_CLANG_BIN` +* `CM_LLVM_CLANG_CACHE_TAGS` \ No newline at end of file diff --git a/docs/Compiler-automation/install-gcc-src.md b/docs/Compiler-automation/install-gcc-src.md new file mode 100644 index 000000000..a4fa68a1c --- /dev/null +++ b/docs/Compiler-automation/install-gcc-src.md @@ -0,0 +1,127 @@ +Automatically generated README for this automation recipe: **install-gcc-src** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-gcc-src,faae0ebd6e1242db) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gcc-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,src,gcc,src-gcc* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install src gcc src-gcc" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,src,gcc,src-gcc` + +`cm run script --tags=install,src,gcc,src-gcc ` + +*or* + +`cmr "install src gcc src-gcc"` + +`cmr "install src gcc src-gcc " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'install,src,gcc,src-gcc', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,src,gcc,src-gcc"``` + +#### Run this script via Docker (beta) + +`cm docker script "install src gcc src-gcc" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `12` + +* `master` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gcc-src/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gcc-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gcc-src/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gcc-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gcc-src/_cm.json) + 1. Run "postrocess" function from customize.py + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gcc-src/_cm.json)*** + * get,gcc + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + +___ +### Script output +`cmr "install src gcc src-gcc " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/install-ipex-from-src.md b/docs/Compiler-automation/install-ipex-from-src.md new file mode 100644 index 000000000..9762178c4 --- /dev/null +++ b/docs/Compiler-automation/install-ipex-from-src.md @@ -0,0 +1,198 @@ +Automatically generated README for this automation recipe: **install-ipex-from-src** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-ipex-from-src,09364fff2bf04516) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-ipex-from-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,get,src,from.src,ipex,src-ipex* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install get src from.src ipex src-ipex" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,get,src,from.src,ipex,src-ipex` + +`cm run script --tags=install,get,src,from.src,ipex,src-ipex[,variations] ` + +*or* + +`cmr "install get src from.src ipex src-ipex"` + +`cmr "install get src from.src ipex src-ipex [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'install,get,src,from.src,ipex,src-ipex', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,get,src,from.src,ipex,src-ipex"``` + +#### Run this script via Docker (beta) + +`cm docker script "install get src from.src ipex src-ipex[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + * `_for-intel-mlperf-inference-v3.1-gptj` + - Environment variables: + - *CM_CONDA_ENV*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,conda,_name.gptj-pt + * CM names: `--adr.['conda']...` + - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) + * get,generic,conda-package,_package.python + * CM names: `--adr.['conda-package', 'python3']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.wheel,_source.conda-forge + * CM names: `--adr.['conda-package', 'wheel']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.setuptools,_source.conda-forge + * CM names: `--adr.['conda-package', 'setuptools']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * install,llvm,src,_for-intel-mlperf-inference-v3.1-gptj + - CM script: [install-llvm-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-src) + * `_sha.#` + - Environment variables: + - *CM_GIT_CHECKOUT_SHA*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ + + * Group "**repo**" +
+ Click here to expand this section. + + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + * **`_repo.https://github.com/intel/intel-extension-for-pytorch`** (default) + - Environment variables: + - *CM_GIT_URL*: `https://github.com/intel/intel-extension-for-pytorch` + - Workflow: + +
+ + +#### Default variations + +`_repo.https://github.com/intel/intel-extension-for-pytorch` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-ipex-from-src/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,python3 + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_CONDA_ENV': ['yes']}` + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,pytorch,from.src + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_CONDA_ENV': ['yes']}` + * CM names: `--adr.['pytorch']...` + - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src) + * get,git,repo + * CM names: `--adr.['ipex-src-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-ipex-from-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-ipex-from-src/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-ipex-from-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-ipex-from-src/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-ipex-from-src/_cm.json) + +___ +### Script output +`cmr "install get src from.src ipex src-ipex [,variations]" -j` +#### New environment keys (filter) + +* `CM_IPEX_*` +#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/install-llvm-prebuilt.md b/docs/Compiler-automation/install-llvm-prebuilt.md new file mode 100644 index 000000000..6d338b092 --- /dev/null +++ b/docs/Compiler-automation/install-llvm-prebuilt.md @@ -0,0 +1,137 @@ +Automatically generated README for this automation recipe: **install-llvm-prebuilt** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=install-llvm-prebuilt,cda9094971724a0a) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,prebuilt,llvm,prebuilt-llvm,install-prebuilt-llvm* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install prebuilt llvm prebuilt-llvm install-prebuilt-llvm" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,prebuilt,llvm,prebuilt-llvm,install-prebuilt-llvm` + +`cm run script --tags=install,prebuilt,llvm,prebuilt-llvm,install-prebuilt-llvm ` + +*or* + +`cmr "install prebuilt llvm prebuilt-llvm install-prebuilt-llvm"` + +`cmr "install prebuilt llvm prebuilt-llvm install-prebuilt-llvm " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'install,prebuilt,llvm,prebuilt-llvm,install-prebuilt-llvm', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,prebuilt,llvm,prebuilt-llvm,install-prebuilt-llvm"``` + +#### Run this script via Docker (beta) + +`cm docker script "install prebuilt llvm prebuilt-llvm install-prebuilt-llvm" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `15.0.6` + +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt/_cm.json)*** + * get,llvm + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) + +___ +### Script output +`cmr "install prebuilt llvm prebuilt-llvm install-prebuilt-llvm " -j` +#### New environment keys (filter) + +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+LD_LIBRARY_PATH` +* `+PATH` +* `CM_COMPILER_NAME` +* `CM_LLVM_*` +#### New environment keys auto-detected from customize + +* `CM_LLVM_CLANG_BIN_WITH_PATH` +* `CM_LLVM_INSTALLED_PATH` +* `CM_LLVM_PACKAGE` \ No newline at end of file diff --git a/docs/Compiler-automation/install-llvm-src.md b/docs/Compiler-automation/install-llvm-src.md new file mode 100644 index 000000000..331fbea92 --- /dev/null +++ b/docs/Compiler-automation/install-llvm-src.md @@ -0,0 +1,292 @@ +Automatically generated README for this automation recipe: **install-llvm-src** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-llvm-src,2af16e9a6c5f4702) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,src,llvm,from.src,src-llvm* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install src llvm from.src src-llvm" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,src,llvm,from.src,src-llvm` + +`cm run script --tags=install,src,llvm,from.src,src-llvm[,variations] ` + +*or* + +`cmr "install src llvm from.src src-llvm"` + +`cmr "install src llvm from.src src-llvm [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'install,src,llvm,from.src,src-llvm', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,src,llvm,from.src,src-llvm"``` + +#### Run this script via Docker (beta) + +`cm docker script "install src llvm from.src src-llvm[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + * `_for-intel-mlperf-inference-v3.1-bert` + - Environment variables: + - *CM_LLVM_CONDA_ENV*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,gcc + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + * get,conda,_name.bert-pt + * CM names: `--adr.['conda']...` + - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) + * get,conda-package,_package.ncurses,_source.conda-forge + * CM names: `--adr.['conda-package', 'ncurses']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.ninja + * CM names: `--adr.['conda-package', 'ninja']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.cmake + * CM names: `--adr.['conda-package', 'cmake']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,conda-package,_package.llvm-openmp,_source.conda-forge + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,conda-package,_package.chardet + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.libstdcxx-ng,_source.conda-forge + * CM names: `--adr.['conda-package', 'libstdcxx-ng']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * `_for-intel-mlperf-inference-v3.1-gptj` + - Environment variables: + - *CM_LLVM_CONDA_ENV*: `yes` + - 
*CM_LLVM_16_INTEL_MLPERF_INFERENCE*: `yes` + - *USE_CUDA*: `0` + - *CUDA_VISIBLE_DEVICES*: `` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-sys-util,_g++-12 + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,gcc + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + * get,conda,_name.gptj-pt + * CM names: `--adr.['conda']...` + - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) + * get,generic,conda-package,_package.python + * CM names: `--adr.['conda-package', 'python']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,conda-package,_package.ncurses,_source.conda-forge + * CM names: `--adr.['conda-package', 'ncurses']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,conda-package,_package.chardet + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.libstdcxx-ng,_source.conda-forge + * CM names: `--adr.['conda-package', 'libstdcxx-ng']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.mkl,_source.intel + * CM names: `--adr.['conda-package', 'mkl']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.mkl-include,_source.intel + * CM names: `--adr.['conda-package', 'mkl-include']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * 
get,generic,conda-package,_package.intel-openmp,_source.intel + * CM names: `--adr.['conda-package', 'intel-openmp']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.gperftools,_source.conda-forge + * CM names: `--adr.['conda-package', 'gperftools']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.pybind11,_source.conda-forge + * CM names: `--adr.['conda-package', 'pybind11']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic-python-lib,_custom-python,_package.torch,_url.git+https://github.com/pytorch/pytorch.git@927dc662386af052018212c7d01309a506fc94cd + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_custom-python,_package.setuptools + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_custom-python,_package.neural-compressor,_url.git+https://github.com/intel/neural-compressor.git@a2931eaa4052eec195be3c79a13f7bfa23e54473 + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_full-history` + - Workflow: + * `_runtimes.#` + - Environment variables: + - *CM_LLVM_ENABLE_RUNTIMES*: `#` + - Workflow: + * `_sha.#` + - Environment variables: + - *CM_GIT_CHECKOUT_SHA*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ + + * Group "**build-type**" +
+ Click here to expand this section. + + * `_debug` + - Environment variables: + - *CM_LLVM_BUILD_TYPE*: `debug` + - Workflow: + * **`_release`** (default) + - Environment variables: + - *CM_LLVM_BUILD_TYPE*: `release` + - Workflow: + +
+ + + * Group "**clang**" +
+ Click here to expand this section. + + * **`_clang`** (default) + - Environment variables: + - *CM_LLVM_ENABLE_PROJECTS*: `clang` + - Workflow: + +
+ + + * Group "**repo**" +
+ Click here to expand this section. + + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + +
+ + +#### Default variations + +`_clang,_release` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-src/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,cmake + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_LLVM_CONDA_ENV': ['yes']}` + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * get,generic-sys-util,_ninja-build + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_LLVM_CONDA_ENV': ['yes']}` + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,git,repo + * CM names: `--adr.['llvm-src-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-src/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-src/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-src/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-src/_cm.json)*** + * get,llvm + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) + +___ +### Script output +`cmr "install src llvm from.src src-llvm [,variations]" -j` +#### New environment keys (filter) + +* `+C_INCLUDE_PATH` +* `+PATH` +* `CM_GET_DEPENDENT_CACHED_PATH` +* `CM_LLVM_*` +#### New environment keys auto-detected from customize + +* `CM_GET_DEPENDENT_CACHED_PATH` +* `CM_LLVM_CLANG_BIN_WITH_PATH` +* `CM_LLVM_CMAKE_CMD` +* `CM_LLVM_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/Compiler-automation/install-onednn-from-src.md b/docs/Compiler-automation/install-onednn-from-src.md new file mode 100644 index 000000000..3d9232a76 --- /dev/null +++ b/docs/Compiler-automation/install-onednn-from-src.md @@ -0,0 +1,181 @@ +Automatically generated README for this automation recipe: **install-onednn-from-src** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-onednn-from-src,fe3a652e315f4c8f) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onednn-from-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,get,src,from.src,onednn,src-onednn* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install get src from.src onednn src-onednn" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,get,src,from.src,onednn,src-onednn` + +`cm run script --tags=install,get,src,from.src,onednn,src-onednn[,variations] ` + +*or* + +`cmr "install get src from.src onednn src-onednn"` + +`cmr "install get src from.src onednn src-onednn [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'install,get,src,from.src,onednn,src-onednn', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,get,src,from.src,onednn,src-onednn"``` + +#### Run this script via Docker (beta) + +`cm docker script "install get src from.src onednn src-onednn[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + * `_for-intel-mlperf-inference-v3.1-bert` + - Environment variables: + - *CM_CONDA_ENV*: `yes` + - *CM_FOR_INTEL_MLPERF_INFERENCE*: `yes` + - Workflow: + * `_sha.#` + - Environment variables: + - *CM_GIT_CHECKOUT_SHA*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ + + * Group "**repo**" +
+ Click here to expand this section. + + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + * **`_repo.https://github.com/oneapi-src/oneDNN`** (default) + - Environment variables: + - *CM_GIT_URL*: `https://github.com/oneapi-src/oneDNN` + - Workflow: + +
+ + +#### Default variations + +`_repo.https://github.com/oneapi-src/oneDNN` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onednn-from-src/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,python3 + * Skip this dependency only if all ENV vars are set:
+`{'CM_CONDA_ENV': ['yes']}` + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,git,repo + * CM names: `--adr.['onednn-src-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onednn-from-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onednn-from-src/_cm.json) + 1. ***Run native script if exists*** + * [run-intel-mlperf-inference.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onednn-from-src/run-intel-mlperf-inference.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onednn-from-src/_cm.json) + 1. Run "postprocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onednn-from-src/_cm.json) + +___ +### Script output +`cmr "install get src from.src onednn src-onednn [,variations]" -j` +#### New environment keys (filter) + +* `CM_ONEDNN_*` +#### New environment keys auto-detected from customize + +* `CM_ONEDNN_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/Compiler-automation/install-onnxruntime-from-src.md b/docs/Compiler-automation/install-onnxruntime-from-src.md new file mode 100644 index 000000000..e99fa55e7 --- /dev/null +++ b/docs/Compiler-automation/install-onnxruntime-from-src.md @@ -0,0 +1,184 @@ +Automatically generated README for this automation recipe: **install-onnxruntime-from-src** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-onnxruntime-from-src,9798c7e7a5944cee) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onnxruntime-from-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,get,src,from.src,onnxruntime,src-onnxruntime* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install get src from.src onnxruntime src-onnxruntime" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,get,src,from.src,onnxruntime,src-onnxruntime` + +`cm run script --tags=install,get,src,from.src,onnxruntime,src-onnxruntime[,variations] ` + +*or* + +`cmr "install get src from.src onnxruntime src-onnxruntime"` + +`cmr "install get src from.src onnxruntime src-onnxruntime [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'install,get,src,from.src,onnxruntime,src-onnxruntime', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,get,src,from.src,onnxruntime,src-onnxruntime"``` + +#### Run this script via Docker (beta) + +`cm docker script "install get src from.src onnxruntime src-onnxruntime[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + * `_cuda` + - Environment variables: + - *CM_ONNXRUNTIME_GPU*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cuda,_cudnn + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_sha.#` + - Environment variables: + - *CM_GIT_CHECKOUT_SHA*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ + + * Group "**repo**" +
+ Click here to expand this section. + + * **`_repo.https://github.com/Microsoft/onnxruntime`** (default) + - Environment variables: + - *CM_GIT_URL*: `https://github.com/Microsoft/onnxruntime` + - Workflow: + +
+ + +#### Default variations + +`_repo.https://github.com/Microsoft/onnxruntime` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onnxruntime-from-src/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * fail,filter,_windows + - CM script: [fail](https://github.com/mlcommons/cm4mlops/tree/master/script/fail) + * get,python3 + * Skip this dependency only if all ENV vars are set:
+`{'CM_CONDA_ENV': ['yes']}` + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * get,gcc + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + * get,git,repo + * CM names: `--adr.['onnxruntime-src-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onnxruntime-from-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onnxruntime-from-src/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onnxruntime-from-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onnxruntime-from-src/_cm.json) + 1. Run "postprocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onnxruntime-from-src/_cm.json) + +___ +### Script output +`cmr "install get src from.src onnxruntime src-onnxruntime [,variations]" -j` +#### New environment keys (filter) + +* `CM_ONNXRUNTIME_*` +#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/install-pytorch-from-src.md b/docs/Compiler-automation/install-pytorch-from-src.md new file mode 100644 index 000000000..c63c6219f --- /dev/null +++ b/docs/Compiler-automation/install-pytorch-from-src.md @@ -0,0 +1,248 @@ +Automatically generated README for this automation recipe: **install-pytorch-from-src** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-pytorch-from-src,64eaf3e81de94f41) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-from-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,get,src,from.src,pytorch,src-pytorch* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install get src from.src pytorch src-pytorch" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,get,src,from.src,pytorch,src-pytorch` + +`cm run script --tags=install,get,src,from.src,pytorch,src-pytorch[,variations] ` + +*or* + +`cmr "install get src from.src pytorch src-pytorch"` + +`cmr "install get src from.src pytorch src-pytorch [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'install,get,src,from.src,pytorch,src-pytorch', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,get,src,from.src,pytorch,src-pytorch"``` + +#### Run this script via Docker (beta) + +`cm docker script "install get src from.src pytorch src-pytorch[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + * `_cuda` + - Environment variables: + - *CUDA_HOME*: `<<>>` + - *CUDNN_LIBRARY_PATH*: `<<>>` + - *CUDNN_INCLUDE_PATH*: `<<>>` + - *CUDA_NVCC_EXECUTABLE*: `<<>>` + - *USE_CUDA*: `1` + - *USE_CUDNN*: `1` + - *TORCH_CUDA_ARCH_LIST*: `Ampere Ada Hopper` + - *TORCH_CXX_FLAGS*: `-D_GLIBCXX_USE_CXX11_ABI=1` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cuda,_cudnn + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_for-intel-mlperf-inference-v3.1-bert` + - Environment variables: + - *CM_CONDA_ENV*: `yes` + - *CM_MLPERF_INFERENCE_INTEL*: `yes` + - *USE_CUDA*: `0` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-sys-util,_libffi7 + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,conda,_name.bert-pt + * CM names: `--adr.['conda']...` + - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) + * get,generic,conda-package,_package.ncurses,_source.conda-forge + * CM names: `--adr.['conda-package', 'ncurses']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.python + * CM names: `--adr.['conda-package', 'python3']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * install,llvm,src,_tag.llvmorg-15.0.7,_runtimes.libcxx:libcxxabi:openmp,_clang,_release,_for-intel-mlperf-inference-v3.1-bert + - CM script: [install-llvm-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-src) + * get,generic,conda-package,_package.ninja + * CM names: `--adr.['conda-package', 'ninja']...` + - CM script: 
[install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.cmake + * CM names: `--adr.['conda-package', 'cmake']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.mkl,_source.intel + * CM names: `--adr.['conda-package', 'mkl']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.mkl-include,_source.intel + * CM names: `--adr.['conda-package', 'mkl-include']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.intel-openmp,_source.intel + * CM names: `--adr.['conda-package', 'intel-openmp']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.llvm-openmp,_source.conda-forge + * CM names: `--adr.['conda-package', 'llvm-openmp']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.jemalloc,_source.conda-forge + * CM names: `--adr.['conda-package', 'jemalloc']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.wheel,_source.conda-forge + * CM names: `--adr.['conda-package', 'wheel']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.setuptools,_source.conda-forge + * CM names: `--adr.['conda-package', 
'setuptools']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.future,_source.conda-forge + * CM names: `--adr.['conda-package', 'future']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.libstdcxx-ng,_source.conda-forge + * CM names: `--adr.['conda-package', 'libstdcxx-ng']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * `_for-nvidia-mlperf-inference-v3.1` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * `_sha.#` + - Environment variables: + - *CM_GIT_CHECKOUT_SHA*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ + + * Group "**repo**" +
+ Click here to expand this section. + + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + * **`_repo.https://github.com/pytorch/pytorch`** (default) + - Environment variables: + - *CM_GIT_URL*: `https://github.com/pytorch/pytorch` + - Workflow: + +
+ + +#### Default variations + +`_repo.https://github.com/pytorch/pytorch` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-from-src/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,python3 + * Skip this dependency only if all ENV vars are set:
+`{'CM_CONDA_ENV': ['yes']}` + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,git,repo + * CM names: `--adr.['pytorch-src-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-from-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-from-src/_cm.json) + 1. ***Run native script if exists*** + * [run-intel-mlperf-inference-v3_1.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-from-src/run-intel-mlperf-inference-v3_1.sh) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-from-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-from-src/_cm.json) + 1. Run "postprocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-from-src/_cm.json) + +___ +### Script output +`cmr "install get src from.src pytorch src-pytorch [,variations]" -j` +#### New environment keys (filter) + +* `CM_PYTORCH_*` +#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/install-pytorch-kineto-from-src.md b/docs/Compiler-automation/install-pytorch-kineto-from-src.md new file mode 100644 index 000000000..5b38ea6ed --- /dev/null +++ b/docs/Compiler-automation/install-pytorch-kineto-from-src.md @@ -0,0 +1,191 @@ +Automatically generated README for this automation recipe: **install-pytorch-kineto-from-src** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-pytorch-kineto-from-src,98a4b061712d4483) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-kineto-from-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,get,src,from.src,pytorch-kineto,kineto,src-pytorch-kineto* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install get src from.src pytorch-kineto kineto src-pytorch-kineto" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,get,src,from.src,pytorch-kineto,kineto,src-pytorch-kineto` + +`cm run script --tags=install,get,src,from.src,pytorch-kineto,kineto,src-pytorch-kineto[,variations] ` + +*or* + +`cmr "install get src from.src pytorch-kineto kineto src-pytorch-kineto"` + +`cmr "install get src from.src pytorch-kineto kineto src-pytorch-kineto [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'install,get,src,from.src,pytorch-kineto,kineto,src-pytorch-kineto', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,get,src,from.src,pytorch-kineto,kineto,src-pytorch-kineto"``` + +#### Run this script via Docker (beta) + +`cm docker script "install get src from.src pytorch-kineto kineto src-pytorch-kineto[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + * `_cuda` + - Environment variables: + - *CUDA_HOME*: `<<>>` + - *CUDA_NVCC_EXECUTABLE*: `<<>>` + - *CUDNN_INCLUDE_PATH*: `<<>>` + - *CUDNN_LIBRARY_PATH*: `<<>>` + - *TORCH_CUDA_ARCH_LIST*: `Ampere Ada Hopper` + - *TORCH_CXX_FLAGS*: `-D_GLIBCXX_USE_CXX11_ABI=1` + - *USE_CUDA*: `1` + - *USE_CUDNN*: `1` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cuda,_cudnn + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_sha.#` + - Environment variables: + - *CM_GIT_CHECKOUT_SHA*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ + + * Group "**repo**" +
+ Click here to expand this section. + + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + * **`_repo.https://github.com/pytorch/kineto`** (default) + - Environment variables: + - *CM_GIT_URL*: `https://github.com/pytorch/kineto` + - Workflow: + +
+ + +#### Default variations + +`_repo.https://github.com/pytorch/kineto` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-kineto-from-src/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,python3 + * Skip this dependency only if all ENV vars are set:
+`{'CM_CONDA_ENV': ['yes']}` + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * get,git,repo + * CM names: `--adr.['pytorch-kineto-src-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-kineto-from-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-kineto-from-src/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-kineto-from-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-kineto-from-src/_cm.json) + 1. Run "postprocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-kineto-from-src/_cm.json) + +___ +### Script output +`cmr "install get src from.src pytorch-kineto kineto src-pytorch-kineto [,variations]" -j` +#### New environment keys (filter) + +* `CM_PYTORCH_KINETO_*` +#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/install-torchvision-from-src.md b/docs/Compiler-automation/install-torchvision-from-src.md new file mode 100644 index 000000000..c269b624b --- /dev/null +++ b/docs/Compiler-automation/install-torchvision-from-src.md @@ -0,0 +1,194 @@ +Automatically generated README for this automation recipe: **install-torchvision-from-src** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-torchvision-from-src,68b855780d474546) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-torchvision-from-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,get,src,from.src,pytorchvision,torchvision,src-pytorchvision* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install get src from.src pytorchvision torchvision src-pytorchvision" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,get,src,from.src,pytorchvision,torchvision,src-pytorchvision` + +`cm run script --tags=install,get,src,from.src,pytorchvision,torchvision,src-pytorchvision[,variations] ` + +*or* + +`cmr "install get src from.src pytorchvision torchvision src-pytorchvision"` + +`cmr "install get src from.src pytorchvision torchvision src-pytorchvision [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'install,get,src,from.src,pytorchvision,torchvision,src-pytorchvision', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,get,src,from.src,pytorchvision,torchvision,src-pytorchvision"``` + +#### Run this script via Docker (beta) + +`cm docker script "install get src from.src pytorchvision torchvision src-pytorchvision[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + * `_cuda` + - Environment variables: + - *CUDA_HOME*: `<<>>` + - *CUDA_NVCC_EXECUTABLE*: `<<>>` + - *CUDNN_INCLUDE_PATH*: `<<>>` + - *CUDNN_LIBRARY_PATH*: `<<>>` + - *USE_CUDA*: `1` + - *USE_CUDNN*: `1` + - *TORCH_CUDA_ARCH_LIST*: `Ampere Ada Hopper` + - *TORCH_CXX_FLAGS*: `-D_GLIBCXX_USE_CXX11_ABI=1` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cuda,_cudnn + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_for-nvidia-mlperf-inference-v3.1` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * install,pytorch,from.src,_for-nvidia-mlperf-inference-v3.1 + - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src) + * `_sha.#` + - Environment variables: + - *CM_GIT_CHECKOUT_SHA*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ + + * Group "**repo**" +
+ Click here to expand this section. + + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + * **`_repo.https://github.com/pytorch/vision`** (default) + - Environment variables: + - *CM_GIT_URL*: `https://github.com/pytorch/vision` + - Workflow: + +
+ + +#### Default variations + +`_repo.https://github.com/pytorch/vision` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-torchvision-from-src/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,python3 + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_CONDA_ENV': ['yes']}` + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,git,repo + * CM names: `--adr.['pytorchision-src-repo', 'torchision-src-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-torchvision-from-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-torchvision-from-src/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-torchvision-from-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-torchvision-from-src/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-torchvision-from-src/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-torchvision-from-src/_cm.json) + +___ +### Script output +`cmr "install get src from.src pytorchvision torchvision src-pytorchvision [,variations]" -j` +#### New environment keys (filter) + +* `CM_PYTORCHVISION_*` +#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/install-tpp-pytorch-extension.md b/docs/Compiler-automation/install-tpp-pytorch-extension.md new file mode 100644 index 000000000..56669e10f --- /dev/null +++ b/docs/Compiler-automation/install-tpp-pytorch-extension.md @@ -0,0 +1,198 @@ +Automatically generated README for this automation recipe: **install-tpp-pytorch-extension** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-tpp-pytorch-extension,1701d2f5f4e84d42) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tpp-pytorch-extension)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,get,src,from.src,tpp-pex,src-tpp-pex* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install get src from.src tpp-pex src-tpp-pex" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,get,src,from.src,tpp-pex,src-tpp-pex` + +`cm run script --tags=install,get,src,from.src,tpp-pex,src-tpp-pex[,variations] ` + +*or* + +`cmr "install get src from.src tpp-pex src-tpp-pex"` + +`cmr "install get src from.src tpp-pex src-tpp-pex [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'install,get,src,from.src,tpp-pex,src-tpp-pex', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + 
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,get,src,from.src,tpp-pex,src-tpp-pex"``` + +#### Run this script via Docker (beta) + +`cm docker script "install get src from.src tpp-pex src-tpp-pex[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + * `_for-intel-mlperf-inference-v3.1-gptj` + - Environment variables: + - *CM_CONDA_ENV*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,conda,_name.gptj-pt + * CM names: `--adr.['conda']...` + - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) + * get,generic,conda-package,_package.python + * CM names: `--adr.['conda-package', 'python3']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.wheel,_source.conda-forge + * CM names: `--adr.['conda-package', 'wheel']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.setuptools,_source.conda-forge + * CM names: `--adr.['conda-package', 'setuptools']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * install,llvm,src,_for-intel-mlperf-inference-v3.1-gptj + - CM script: [install-llvm-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-src) + * `_sha.#` + - Environment variables: + - *CM_GIT_CHECKOUT_SHA*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ + + * Group "**repo**" +
+ Click here to expand this section. + + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + * **`_repo.https://github.com/libxsmm/tpp-pytorch-extension`** (default) + - Environment variables: + - *CM_GIT_URL*: `https://github.com/libxsmm/tpp-pytorch-extension` + - Workflow: + +
+ + +#### Default variations + +`_repo.https://github.com/libxsmm/tpp-pytorch-extension` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tpp-pytorch-extension/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,python3 + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_CONDA_ENV': ['yes']}` + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,pytorch,from.src + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_CONDA_ENV': ['yes']}` + * CM names: `--adr.['pytorch']...` + - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src) + * get,git,repo + * CM names: `--adr.['tpp-pex-src-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tpp-pytorch-extension/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tpp-pytorch-extension/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tpp-pytorch-extension/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tpp-pytorch-extension/_cm.json) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tpp-pytorch-extension/_cm.json) + +___ +### Script output +`cmr "install get src from.src tpp-pex src-tpp-pex [,variations]" -j` +#### New environment keys (filter) + +* `CM_TPP_PEX_*` +#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/install-transformers-from-src.md b/docs/Compiler-automation/install-transformers-from-src.md new file mode 100644 index 000000000..0ac334c3c --- /dev/null +++ b/docs/Compiler-automation/install-transformers-from-src.md @@ -0,0 +1,196 @@ +Automatically generated README for this automation recipe: **install-transformers-from-src** + +Category: **Compiler automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-transformers-from-src,88512c48ea5c4186) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-transformers-from-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,src,from.src,transformers,src-transformers* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install src from.src transformers src-transformers" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,src,from.src,transformers,src-transformers` + +`cm run script --tags=install,src,from.src,transformers,src-transformers[,variations] ` + +*or* + +`cmr "install src from.src transformers src-transformers"` + +`cmr "install src from.src transformers src-transformers [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'install,src,from.src,transformers,src-transformers', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + 
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,src,from.src,transformers,src-transformers"``` + +#### Run this script via Docker (beta) + +`cm docker script "install src from.src transformers src-transformers[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + * `_for-intel-mlperf-inference-v3.1-bert` + - Environment variables: + - *CM_CONDA_ENV*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,conda,_name.bert-pt + * CM names: `--adr.['conda']...` + - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) + * get,generic,conda-package,_package.python + * CM names: `--adr.['conda-package', 'python3']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.wheel,_source.conda-forge + * CM names: `--adr.['conda-package', 'wheel']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.setuptools,_source.conda-forge + * CM names: `--adr.['conda-package', 'setuptools']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * `_sha.#` + - Environment variables: + - *CM_GIT_CHECKOUT_SHA*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ + + * Group "**repo**" +
+ Click here to expand this section. + + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + * **`_repo.https://github.com/huggingface/transformers`** (default) + - Environment variables: + - *CM_GIT_URL*: `https://github.com/huggingface/transformers` + - Workflow: + +
+ + +#### Default variations + +`_repo.https://github.com/huggingface/transformers` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-transformers-from-src/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,python3 + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_CONDA_ENV': ['yes']}` + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,pytorch,from.src + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_CONDA_ENV': ['yes']}` + * CM names: `--adr.['pytorch']...` + - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src) + * get,git,repo + * CM names: `--adr.['transformers-src-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-transformers-from-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-transformers-from-src/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-transformers-from-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-transformers-from-src/_cm.json) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-transformers-from-src/_cm.json) + +___ +### Script output +`cmr "install src from.src transformers src-transformers [,variations]" -j` +#### New environment keys (filter) + +* `CM_TRANSFORMERS_*` +#### New environment keys auto-detected from customize diff --git a/docs/Dashboard-automation/publish-results-to-dashboard.md b/docs/Dashboard-automation/publish-results-to-dashboard.md new file mode 100644 index 000000000..d59985009 --- /dev/null +++ b/docs/Dashboard-automation/publish-results-to-dashboard.md @@ -0,0 +1,123 @@ +Automatically generated README for this automation recipe: **publish-results-to-dashboard** + +Category: **Dashboard automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=publish-results-to-dashboard,4af3a2d09f14412b) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/publish-results-to-dashboard)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *publish-results,dashboard* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "publish-results dashboard" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=publish-results,dashboard` + +`cm run script --tags=publish-results,dashboard ` + +*or* + +`cmr "publish-results dashboard"` + +`cmr "publish-results dashboard " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'publish-results,dashboard', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + 
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="publish-results,dashboard"``` + +#### Run this script via Docker (beta) + +`cm docker script "publish-results dashboard" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/publish-results-to-dashboard/_cm.json)*** + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_wandb + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/publish-results-to-dashboard/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/publish-results-to-dashboard/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/publish-results-to-dashboard/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/publish-results-to-dashboard/_cm.json) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/publish-results-to-dashboard/_cm.json) + +___ +### Script output +`cmr "publish-results dashboard " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-android-sdk.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-android-sdk.md new file mode 100644 index 000000000..45d8ea2db --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-android-sdk.md @@ -0,0 +1,151 @@ +Automatically generated README for this automation recipe: **get-android-sdk** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-android-sdk,8c5b4b83d49c441a) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-android-sdk)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,android,sdk,android-sdk* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get android sdk android-sdk" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,android,sdk,android-sdk` + +`cm run script --tags=get,android,sdk,android-sdk [--input_flags]` + +*or* + +`cmr "get android sdk android-sdk"` + +`cmr "get android sdk android-sdk " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,android,sdk,android-sdk', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + 
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,android,sdk,android-sdk"``` + +#### Run this script via Docker (beta) + +`cm docker script "get android sdk android-sdk" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--android_cmake_version=value` → `CM_ANDROID_CMAKE_VERSION=value` +* `--android_ndk_version=value` → `CM_ANDROID_NDK_VERSION=value` +* `--android_version=value` → `CM_ANDROID_VERSION=value` +* `--build_tools_version=value` → `CM_ANDROID_BUILD_TOOLS_VERSION=value` +* `--cmdline_tools_version=value` → `CM_ANDROID_CMDLINE_TOOLS_VERSION=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "android_cmake_version":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_ANDROID_BUILD_TOOLS_VERSION: `29.0.3` +* CM_ANDROID_CMAKE_VERSION: `3.6.4111459` +* CM_ANDROID_CMDLINE_TOOLS_URL: `https://dl.google.com/android/repository/commandlinetools-${CM_ANDROID_CMDLINE_TOOLS_OS}-${CM_ANDROID_CMDLINE_TOOLS_VERSION}_latest.zip` +* CM_ANDROID_CMDLINE_TOOLS_VERSION: `9123335` +* CM_ANDROID_NDK_VERSION: `21.3.6528147` +* CM_ANDROID_VERSION: `30` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-android-sdk/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,java + - CM script: [get-java](https://github.com/mlcommons/cm4mlops/tree/master/script/get-java) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-android-sdk/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-android-sdk/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-android-sdk/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-android-sdk/_cm.json) + +___ +### Script output +`cmr "get android sdk android-sdk " [--input_flags] -j` +#### New environment keys (filter) + +* `+PATH` +* `ANDROID_HOME` +* `ANDROID_NDK_HOME` +* `CM_ANDROID_HOME` +#### New environment keys auto-detected from customize + +* `CM_ANDROID_HOME` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-aria2.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-aria2.md new file mode 100644 index 000000000..daf2c012d --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-aria2.md @@ -0,0 +1,148 @@ +Automatically generated README for this automation recipe: **get-aria2** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-aria2,d83419a90a0c40d0) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *get,aria2,get-aria2* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get aria2 get-aria2" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,aria2,get-aria2` + +`cm run script --tags=get,aria2,get-aria2 [--input_flags]` + +*or* + +`cmr "get aria2 get-aria2"` + +`cmr "get aria2 get-aria2 " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,aria2,get-aria2', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + 
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,aria2,get-aria2"``` + +#### Run this script via Docker (beta) + +`cm docker script "get aria2 get-aria2" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--install=value` → `CM_FORCE_INSTALL=value` +* `--src=value` → `CM_ARIA2_BUILD_FROM_SRC=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "install":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2/_cm.yaml)*** + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2/_cm.yaml) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2/_cm.yaml) + +___ +### Script output +`cmr "get aria2 get-aria2 " [--input_flags] -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_ARIA2_*` +#### New environment keys auto-detected from customize + +* `CM_ARIA2_BIN_WITH_PATH` +* `CM_ARIA2_DOWNLOAD_DIR` +* `CM_ARIA2_DOWNLOAD_FILE` +* `CM_ARIA2_DOWNLOAD_FILE2` +* `CM_ARIA2_DOWNLOAD_URL` +* `CM_ARIA2_INSTALLED_PATH` +* `CM_ARIA2_INSTALLED_TO_CACHE` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-bazel.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-bazel.md new file mode 100644 index 000000000..d10d39230 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-bazel.md @@ -0,0 +1,127 @@ +Automatically generated README for this automation recipe: **get-bazel** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-bazel,eaef0be38bac493c) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,bazel,get-bazel* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get bazel get-bazel" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,bazel,get-bazel` + +`cm run script --tags=get,bazel,get-bazel ` + +*or* + +`cmr "get bazel get-bazel"` + +`cmr "get bazel get-bazel " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,bazel,get-bazel', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,bazel,get-bazel"``` + +#### Run this script via Docker (beta) + +`cm docker script "get bazel get-bazel" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel/_cm.json)*** + * install,bazel + * Enable this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [install-bazel](https://github.com/mlcommons/cm4mlops/tree/master/script/install-bazel) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel/_cm.json) + +___ +### Script output +`cmr "get bazel get-bazel " -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_BAZEL_*` +#### New environment keys auto-detected from customize + +* `CM_BAZEL_CACHE_TAGS` +* `CM_BAZEL_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-blis.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-blis.md new file mode 100644 index 000000000..dad973bb2 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-blis.md @@ -0,0 +1,158 @@ +Automatically generated README for this automation recipe: **get-blis** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-blis,ea6e1cf75242456c) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,lib,blis* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get lib blis" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,lib,blis` + +`cm run script --tags=get,lib,blis[,variations] ` + +*or* + +`cmr "get lib blis"` + +`cmr "get lib blis [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,lib,blis', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,lib,blis"``` + +#### Run this script via Docker (beta) + +`cm docker script "get lib blis[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**source**" +
+ Click here to expand this section. + + * `_amd` + - Workflow: + * **`_flame`** (default) + - Workflow: + +
+ + +#### Default variations + +`_flame` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `master` + +* `0.9.0` +* `master` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis/_cm.json)*** + * get,git + * CM names: `--adr.['blis-source-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis/_cm.json) + +___ +### Script output +`cmr "get lib blis [,variations]" -j` +#### New environment keys (filter) + +* `+LD_LIBRARY_PATH` +* `CM_BLIS_INC` +* `CM_BLIS_INSTALL_PATH` +* `CM_BLIS_LIB` +* `CM_BLIS_SRC_PATH` +#### New environment keys auto-detected from customize + +* `CM_BLIS_INC` +* `CM_BLIS_INSTALL_PATH` +* `CM_BLIS_LIB` +* `CM_BLIS_SRC_PATH` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-brew.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-brew.md new file mode 100644 index 000000000..7bd857bdb --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-brew.md @@ -0,0 +1,117 @@ +Automatically generated README for this automation recipe: **get-brew** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-brew,4a2c5eab1ccf484f) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-brew)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,brew* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get brew" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,brew` + +`cm run script --tags=get,brew ` + +*or* + +`cmr "get brew"` + +`cmr "get brew " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,brew', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,brew"``` + +#### Run this script via Docker (beta) + +`cm docker script "get brew" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-brew/_cm.json) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-brew/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-brew/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-brew/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-brew/_cm.json) + +___ +### Script output +`cmr "get brew " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-cmake.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-cmake.md new file mode 100644 index 000000000..cc5c63cd7 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-cmake.md @@ -0,0 +1,130 @@ +Automatically generated README for this automation recipe: **get-cmake** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-cmake,52bf974d791b4fc8) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find 
and reuse this script (see in above meta description): *get,cmake,get-cmake* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get cmake get-cmake" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,cmake,get-cmake` + +`cm run script --tags=get,cmake,get-cmake ` + +*or* + +`cmr "get cmake get-cmake"` + +`cmr "get cmake get-cmake " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,cmake,get-cmake', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,cmake,get-cmake"``` + +#### Run this script via Docker (beta) + +`cm docker script "get cmake get-cmake" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake/_cm.json)*** + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake/_cm.json)*** + * install,cmake,prebuilt + * Enable this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [install-cmake-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/install-cmake-prebuilt) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake/_cm.json) + +___ +### Script output +`cmr "get cmake get-cmake " -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_CMAKE_*` +* `CM_MAKE_CORES` +#### New environment keys auto-detected from customize + +* `CM_CMAKE_CACHE_TAGS` +* `CM_MAKE_CORES` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-cmsis_5.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-cmsis_5.md new file mode 100644 index 000000000..6e90b97e0 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-cmsis_5.md @@ -0,0 +1,149 @@ +Automatically generated README for this automation recipe: **get-cmsis_5** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-cmsis_5,2258c212b11443f5) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* 
GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmsis_5)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,cmsis,cmsis_5,arm-software* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get cmsis cmsis_5 arm-software" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,cmsis,cmsis_5,arm-software` + +`cm run script --tags=get,cmsis,cmsis_5,arm-software[,variations] ` + +*or* + +`cmr "get cmsis cmsis_5 arm-software"` + +`cmr "get cmsis cmsis_5 arm-software [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,cmsis,cmsis_5,arm-software', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,cmsis,cmsis_5,arm-software"``` + +#### Run this script via Docker (beta) + +`cm docker script "get cmsis cmsis_5 arm-software[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_recurse-submodules` + - Environment variables: + - *CM_GIT_RECURSE_SUBMODULES*: `--recurse-submodules` + - Workflow: + * `_short-history` + - Environment variables: + - *CM_GIT_DEPTH*: `--depth 10` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_DEPTH: `` +* CM_GIT_PATCH: `no` +* CM_GIT_URL: `https://github.com/ARM-software/CMSIS_5.git` + +
+ +#### Versions +Default version: `custom` + +* `custom` +* `develop` +* `master` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmsis_5/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmsis_5/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmsis_5/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmsis_5/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmsis_5/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmsis_5/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmsis_5/_cm.json) + +___ +### Script output +`cmr "get cmsis cmsis_5 arm-software [,variations]" -j` +#### New environment keys (filter) + +* `CMSIS*` +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-docker.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-docker.md new file mode 100644 index 000000000..a836ce6de --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-docker.md @@ -0,0 +1,119 @@ +Automatically generated README for this automation recipe: **get-docker** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-docker,6192accce4234084) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-docker)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,install,docker,engine* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get install docker engine" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,install,docker,engine` + +`cm run script --tags=get,install,docker,engine ` + +*or* + +`cmr "get install docker engine"` + +`cmr "get install docker engine " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,install,docker,engine', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,install,docker,engine"``` + +#### Run this script via Docker (beta) + +`cm docker script "get install docker engine" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-docker/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-docker/_cm.json) + 1. ***Run native script if exists*** + * [run-ubuntu.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-docker/run-ubuntu.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-docker/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-docker/_cm.json) + +___ +### Script output +`cmr "get install docker engine " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-generic-sys-util.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-generic-sys-util.md new file mode 100644 index 000000000..ef0ece7f9 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-generic-sys-util.md @@ -0,0 +1,227 @@ +Automatically generated README for this automation recipe: **get-generic-sys-util** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-generic-sys-util,bb0393afa8404a11) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-sys-util)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,sys-util,generic,generic-sys-util* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get sys-util generic generic-sys-util" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,sys-util,generic,generic-sys-util` + +`cm run script --tags=get,sys-util,generic,generic-sys-util[,variations] ` + +*or* + +`cmr "get sys-util generic generic-sys-util"` + +`cmr "get sys-util generic generic-sys-util [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,sys-util,generic,generic-sys-util', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,sys-util,generic,generic-sys-util"``` + +#### Run this script via Docker (beta) + +`cm docker script "get sys-util generic generic-sys-util[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_g++-12` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `g++12` + - Workflow: + * `_gflags-dev` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `gflags-dev` + - Workflow: + * `_git-lfs` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `git-lfs` + - Workflow: + * `_glog-dev` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `glog-dev` + - Workflow: + * `_libboost-all-dev` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `libboost-all-dev` + - Workflow: + * `_libffi7` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `libffi7` + - Workflow: + * `_libgmock-dev` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `libgmock-dev` + - Workflow: + * `_libmpfr-dev` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `libmpfr-dev` + - Workflow: + * `_libnuma-dev` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `libnuma-dev` + - Workflow: + * `_libpci-dev` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `libpci-dev` + - Workflow: + * `_libre2-dev` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `libre2-dev` + - Workflow: + * `_libudev-dev` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `libudev-dev` + - Workflow: + * `_ninja-build` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `ninja-build` + - Workflow: + * `_ntpdate` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `ntpdate` + - Workflow: + * `_numactl` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `numactl` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * install,numactl,from.src + * Enable this dependency only if all ENV vars are set:
+`{'CM_HOST_OS_FLAVOR': ['rhel'], 'CM_HOST_OS_VERSION': ['9.1', '9.2', '9.3']}` + - CM script: [install-numactl-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-numactl-from-src) + * `_nvidia-cuda-toolkit` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `nvidia-cuda-toolkit` + - Workflow: + * `_rapidjson-dev` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `rapidjson-dev` + - Workflow: + * `_rsync` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `rsync` + - Workflow: + * `_screen` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `screen` + - Workflow: + * `_sox` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `sox` + - Workflow: + * `_transmission` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `transmission` + - Workflow: + * `_zlib` + - Environment variables: + - *CM_SYS_UTIL_NAME*: `zlib` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_CLEAN_DIRS: `bin` +* CM_SUDO: `sudo` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-sys-util/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-sys-util/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-sys-util/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-sys-util/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-sys-util/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-sys-util/_cm.json) + +___ +### Script output +`cmr "get sys-util generic generic-sys-util [,variations]" -j` +#### New environment keys (filter) + +* `+PATH` +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-google-test.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-google-test.md new file mode 100644 index 000000000..185960383 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-google-test.md @@ -0,0 +1,137 @@ +Automatically generated README for this automation recipe: **get-google-test** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-google-test,02945138a5614253) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-test)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,google-test,googletest,gtest,test,google* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get google-test googletest gtest test google" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,google-test,googletest,gtest,test,google` + +`cm run script --tags=get,google-test,googletest,gtest,test,google ` + +*or* + +`cmr "get google-test googletest gtest test google"` + +`cmr "get google-test googletest gtest test google " ` + + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,google-test,googletest,gtest,test,google',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,google-test,googletest,gtest,test,google"``` + +#### Run this script via Docker (beta) + +`cm docker script "get google-test googletest gtest test google" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `1.14.0` + +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-test/_cm.json)*** + * get,cmake + * CM names: `--adr.['cmake']...` + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * get,compiler + * CM names: `--adr.['compiler']...` + - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-test/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-test/_cm.json)*** + * get,git,repo,_repo.https://github.com/google/googletest.git + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-test/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-test/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-test/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-test/_cm.json) + +___ +### Script output +`cmr "get google-test googletest gtest test google " -j` +#### New environment keys (filter) + +* `+C_INCLUDE_PATH` +* `+LD_LIBRARY_PATH` +* `CM_GOOGLE_TEST_INSTALL_PATH` +* `CM_GOOGLE_TEST_SRC_PATH` +#### New environment keys auto-detected from customize + +* `CM_GOOGLE_TEST_INSTALL_PATH` +* `CM_GOOGLE_TEST_SRC_PATH` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-java.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-java.md new file mode 100644 index 000000000..ae31b39ca --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-java.md @@ -0,0 +1,165 @@ +Automatically generated README for this automation recipe: **get-java** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-java,9399d0e785704f8c) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,java* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get java" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,java` + +`cm run script --tags=get,java[,variations] [--input_flags]` + +*or* + +`cmr "get java"` + +`cmr "get java [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,java',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,java"``` + +#### Run this script via Docker (beta) + +`cm docker script "get java[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_install` + - Environment variables: + - *CM_JAVA_PREBUILT_INSTALL*: `on` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+<summary>Click here to expand this section.</summary>
+
+* `--install=value` → `CM_JAVA_PREBUILT_INSTALL=value`
+
+**Above CLI flags can be used in the Python CM API as follows:**
+
+```python
+r=cm.access({... , "install":...})
+```
+
+
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_JAVA_PREBUILT_VERSION: `19` +* CM_JAVA_PREBUILT_BUILD: `36` +* CM_JAVA_PREBUILT_URL: `https://download.java.net/openjdk/jdk${CM_JAVA_PREBUILT_VERSION}/ri/` +* CM_JAVA_PREBUILT_FILENAME: `openjdk-${CM_JAVA_PREBUILT_VERSION}+${CM_JAVA_PREBUILT_BUILD}_${CM_JAVA_PREBUILT_HOST_OS}-x64_bin` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java/_cm.json) + +___ +### Script output +`cmr "get java [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_JAVA_*` +* `JAVA_HOME` +#### New environment keys auto-detected from customize + +* `CM_JAVA_BIN` +* `CM_JAVA_CACHE_TAGS` +* `CM_JAVA_PREBUILT_EXT` +* `CM_JAVA_PREBUILT_FILENAME` +* `CM_JAVA_PREBUILT_HOST_OS` +* `CM_JAVA_PREBUILT_URL` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-javac.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-javac.md new file mode 100644 index 000000000..fbb21c123 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-javac.md @@ -0,0 +1,168 @@ +Automatically generated README for this automation recipe: **get-javac** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-javac,509280c497b24226) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,javac* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get javac" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,javac` + +`cm run script --tags=get,javac[,variations] [--input_flags]` + +*or* + +`cmr "get javac"` + +`cmr "get javac [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,javac',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,javac"``` + +#### Run this script via Docker (beta) + +`cm docker script "get javac[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_install` + - Environment variables: + - *CM_JAVAC_PREBUILT_INSTALL*: `on` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+<summary>Click here to expand this section.</summary>
+
+* `--install=value` → `CM_JAVAC_PREBUILT_INSTALL=value`
+
+**Above CLI flags can be used in the Python CM API as follows:**
+
+```python
+r=cm.access({... , "install":...})
+```
+
+
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_JAVAC_PREBUILT_VERSION: `19` +* CM_JAVAC_PREBUILT_BUILD: `36` +* CM_JAVAC_PREBUILT_URL: `https://download.java.net/openjdk/jdk${CM_JAVAC_PREBUILT_VERSION}/ri/` +* CM_JAVAC_PREBUILT_FILENAME: `openjdk-${CM_JAVAC_PREBUILT_VERSION}+${CM_JAVAC_PREBUILT_BUILD}_${CM_JAVAC_PREBUILT_HOST_OS}-x64_bin` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac/_cm.json) + +___ +### Script output +`cmr "get javac [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_JAVAC_*` +* `CM_JAVA_*` +* `JAVA_HOME` +#### New environment keys auto-detected from customize + +* `CM_JAVAC_BIN` +* `CM_JAVAC_CACHE_TAGS` +* `CM_JAVAC_PREBUILT_EXT` +* `CM_JAVAC_PREBUILT_FILENAME` +* `CM_JAVAC_PREBUILT_HOST_OS` +* `CM_JAVAC_PREBUILT_URL` +* `CM_JAVA_BIN` +* `CM_JAVA_BIN_WITH_PATH` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-armnn.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-armnn.md new file mode 100644 index 000000000..745886d07 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-armnn.md @@ -0,0 +1,132 @@ +Automatically generated README for this automation recipe: **get-lib-armnn** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-lib-armnn,9603a2e90fd44587) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-armnn)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,lib-armnn,lib,armnn* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get lib-armnn lib armnn" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,lib-armnn,lib,armnn` + +`cm run script --tags=get,lib-armnn,lib,armnn ` + +*or* + +`cmr "get lib-armnn lib armnn"` + +`cmr "get lib-armnn lib armnn " ` + + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,lib-armnn,lib,armnn',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,lib-armnn,lib,armnn"``` + +#### Run this script via Docker (beta) + +`cm docker script "get lib-armnn lib armnn" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `23.11` + +* `22.11` +* `23.05` +* `23.11` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-armnn/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-armnn/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-armnn/_cm.json)*** + * get,git,repo,_repo.https://github.com/ARM-software/armnn + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-armnn/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-armnn/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-armnn/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-armnn/_cm.json) + +___ +### Script output +`cmr "get lib-armnn lib armnn " -j` +#### New environment keys (filter) + +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+LD_LIBRARY_PATH` +* `CM_LIB_ARMNN_VERSION` +* `CM_LIB_DNNL_*` +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-dnnl.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-dnnl.md new file mode 100644 index 000000000..e12b39926 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-dnnl.md @@ -0,0 +1,132 @@ +Automatically generated README for this automation recipe: **get-lib-dnnl** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-lib-dnnl,1cd35a6a3b0b4530) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-dnnl)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,lib-dnnl,lib,dnnl* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get lib-dnnl lib dnnl" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,lib-dnnl,lib,dnnl` + +`cm run script --tags=get,lib-dnnl,lib,dnnl ` + +*or* + +`cmr "get lib-dnnl lib dnnl"` + +`cmr "get lib-dnnl lib dnnl " ` + + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,lib-dnnl,lib,dnnl',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,lib-dnnl,lib,dnnl"``` + +#### Run this script via Docker (beta) + +`cm docker script "get lib-dnnl lib dnnl" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `dev` + +* `2.2.4` +* `dev` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-dnnl/_cm.json)*** + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * cmake,get-cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-dnnl/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-dnnl/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-dnnl/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-dnnl/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-dnnl/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-dnnl/_cm.json) + +___ +### Script output +`cmr "get lib-dnnl lib dnnl " -j` +#### New environment keys (filter) + +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+LD_LIBRARY_PATH` +* `CM_LIB_DNNL_*` +#### New environment keys auto-detected from customize + +* `CM_LIB_DNNL_INSTALL_DIR` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-protobuf.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-protobuf.md new file mode 100644 index 000000000..80608d46d --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-protobuf.md @@ -0,0 +1,154 @@ +Automatically generated README for this automation recipe: **get-lib-protobuf** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-lib-protobuf,db45f1eb73934f91) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-protobuf)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,google-protobuf,protobuf,lib,lib-protobuf,google* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get google-protobuf protobuf lib lib-protobuf google" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,google-protobuf,protobuf,lib,lib-protobuf,google` + +`cm run script --tags=get,google-protobuf,protobuf,lib,lib-protobuf,google[,variations] ` + +*or* + +`cmr "get google-protobuf protobuf lib lib-protobuf google"` + +`cmr "get google-protobuf protobuf lib lib-protobuf google [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,google-protobuf,protobuf,lib,lib-protobuf,google',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,google-protobuf,protobuf,lib,lib-protobuf,google"``` + +#### Run this script via Docker (beta) + +`cm docker script "get google-protobuf protobuf lib lib-protobuf google[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_TMP_GIT_CHECKOUT*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `1.13.0` + +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-protobuf/_cm.json)*** + * get,cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * get,gcc + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-protobuf/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-protobuf/_cm.json)*** + * get,git,repo,_repo.https://github.com/google/protobuf.git + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-protobuf/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-protobuf/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-protobuf/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-protobuf/_cm.json) + +___ +### Script output +`cmr "get google-protobuf protobuf lib lib-protobuf google [,variations]" -j` +#### New environment keys (filter) + +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+LD_LIBRARY_PATH` +* `CM_GOOGLE_PROTOBUF_INSTALL_PATH` +* `CM_GOOGLE_PROTOBUF_SRC_PATH` +#### New environment keys auto-detected from customize + +* `CM_GOOGLE_PROTOBUF_INSTALL_PATH` +* `CM_GOOGLE_PROTOBUF_SRC_PATH` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-qaic-api.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-qaic-api.md new file mode 100644 index 000000000..243dc586f --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-qaic-api.md @@ -0,0 +1,131 @@ +Automatically generated README for this automation recipe: **get-lib-qaic-api** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-lib-qaic-api,1e253ae184e44f23) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-qaic-api)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,api,lib-qaic-api,lib,qaic* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get api lib-qaic-api lib qaic" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,api,lib-qaic-api,lib,qaic` + +`cm run script --tags=get,api,lib-qaic-api,lib,qaic ` + +*or* + +`cmr "get api lib-qaic-api lib qaic"` + +`cmr "get api lib-qaic-api lib qaic " ` + + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,api,lib-qaic-api,lib,qaic',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,api,lib-qaic-api,lib,qaic"``` + +#### Run this script via Docker (beta) + +`cm docker script "get api lib-qaic-api lib qaic" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `master` + +* `master` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-qaic-api/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-qaic-api/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-qaic-api/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-qaic-api/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-qaic-api/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-qaic-api/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-qaic-api/_cm.json) + +___ +### Script output +`cmr "get api lib-qaic-api lib qaic " -j` +#### New environment keys (filter) + +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+LD_LIBRARY_PATH` +* `CM_LIB_QAIC_*` +* `CM_QAIC_API_*` +#### New environment keys auto-detected from customize + +* `CM_QAIC_API_INC_FILE` +* `CM_QAIC_API_SRC_FILE` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-nvidia-docker.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-nvidia-docker.md new file mode 100644 index 000000000..284218740 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-nvidia-docker.md @@ -0,0 +1,121 @@ +Automatically generated README for this automation recipe: **get-nvidia-docker** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-nvidia-docker,465ae240998e4779) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-docker)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,install,nvidia,nvidia-container-toolkit,nvidia-docker,engine* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get install nvidia nvidia-container-toolkit nvidia-docker engine" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,install,nvidia,nvidia-container-toolkit,nvidia-docker,engine` + +`cm run script --tags=get,install,nvidia,nvidia-container-toolkit,nvidia-docker,engine ` + +*or* + +`cmr "get install nvidia nvidia-container-toolkit nvidia-docker engine"` + +`cmr "get install nvidia nvidia-container-toolkit nvidia-docker engine " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,install,nvidia,nvidia-container-toolkit,nvidia-docker,engine', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,install,nvidia,nvidia-container-toolkit,nvidia-docker,engine"``` + +#### Run this script via Docker (beta) + +`cm docker script "get install nvidia nvidia-container-toolkit nvidia-docker engine" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-docker/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,docker + - CM script: [get-docker](https://github.com/mlcommons/cm4mlops/tree/master/script/get-docker) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-docker/_cm.json) + 1. ***Run native script if exists*** + * [run-ubuntu.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-docker/run-ubuntu.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-docker/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-docker/_cm.json) + +___ +### Script output +`cmr "get install nvidia nvidia-container-toolkit nvidia-docker engine " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-openssl.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-openssl.md new file mode 100644 index 000000000..a348f1272 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-openssl.md @@ -0,0 +1,125 @@ +Automatically generated README for this automation recipe: **get-openssl** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-openssl,febdae70e9e64e30) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-openssl)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,openssl,lib,lib-openssl* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get openssl lib lib-openssl" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,openssl,lib,lib-openssl` + +`cm run script --tags=get,openssl,lib,lib-openssl ` + +*or* + +`cmr "get openssl lib lib-openssl"` + +`cmr "get openssl lib lib-openssl " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,openssl,lib,lib-openssl' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,openssl,lib,lib-openssl"``` + +#### Run this script via Docker (beta) + +`cm docker script "get openssl lib lib-openssl" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-openssl/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-openssl/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-openssl/_cm.json)*** + * install,openssl + * Enable this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [install-openssl](https://github.com/mlcommons/cm4mlops/tree/master/script/install-openssl) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-openssl/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-openssl/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-openssl/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-openssl/_cm.json) + +___ +### Script output +`cmr "get openssl lib lib-openssl " -j` +#### New environment keys (filter) + +* `+LD_LIBRARY_PATH` +* `CM_OPENSSL_*` +#### New environment keys auto-detected from customize + +* `CM_OPENSSL_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-rclone.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-rclone.md new file mode 100644 index 000000000..6dcd4adda --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-rclone.md @@ -0,0 +1,150 @@ +Automatically generated README for this automation recipe: **get-rclone** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-rclone,22ffb43c49c9419e) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All 
CM tags to find and reuse this script (see in above meta description): *get,rclone* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get rclone" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,rclone` + +`cm run script --tags=get,rclone[,variations] ` + +*or* + +`cmr "get rclone"` + +`cmr "get rclone [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Getting Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,rclone' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,rclone"``` + +#### Run this script via Docker (beta) + +`cm docker script "get rclone[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_gdrive` + - Environment variables: + - *CM_RCLONE_GDRIVE*: `yes` + - Workflow: + * `_system` + - Environment variables: + - *CM_RCLONE_SYSTEM*: `yes` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `1.65.2` + +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone/_cm.json) + +___ +### Script output +`cmr "get rclone [,variations]" -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_RCLONE_BIN_WITH_PATH` +* `CM_RCLONE_CACHE_TAGS` +* `CM_RCLONE_VERSION` +#### New environment keys auto-detected from customize + +* `CM_RCLONE_BIN_WITH_PATH` +* `CM_RCLONE_CACHE_TAGS` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-cm.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-cm.md new file mode 100644 index 000000000..143e07b7d --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-cm.md @@ -0,0 +1,156 @@ +Automatically generated README for this automation recipe: **get-sys-utils-cm** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-sys-utils-cm,bc90993277e84b8e) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *get,sys-utils-cm* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get sys-utils-cm" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,sys-utils-cm` + +`cm run script --tags=get,sys-utils-cm[,variations] [--input_flags]` + +*or* + +`cmr "get sys-utils-cm"` + +`cmr "get sys-utils-cm [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,sys-utils-cm' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,sys-utils-cm"``` + +#### Run this script via Docker (beta) + +`cm docker script "get sys-utils-cm[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_user` + - Environment variables: + - *CM_PYTHON_PIP_USER*: `--user` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--skip=value` → `CM_SKIP_SYS_UTILS=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "skip":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/_cm.yaml) + 1. ***Run native script if exists*** + * [run-arch.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/run-arch.sh) + * [run-debian.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/run-debian.sh) + * [run-macos.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/run-macos.sh) + * [run-rhel.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/run-rhel.sh) + * [run-sles.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/run-sles.sh) + * [run-ubuntu.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/run-ubuntu.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/_cm.yaml) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/_cm.yaml) + +___ +### Script output +`cmr "get sys-utils-cm [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `+PATH` +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-min.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-min.md new file mode 100644 index 000000000..c200ad0d0 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-min.md @@ -0,0 +1,117 @@ +Automatically generated README for this automation recipe: **get-sys-utils-min** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-sys-utils-min,a9af7714d3d94779) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-min)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,sys-utils-min* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get sys-utils-min" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,sys-utils-min` + +`cm run script --tags=get,sys-utils-min ` + +*or* + +`cmr "get sys-utils-min"` + +`cmr "get sys-utils-min " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,sys-utils-min' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,sys-utils-min"``` + +#### Run this script via Docker (beta) + +`cm docker script "get sys-utils-min" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-min/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-min/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-min/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-min/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-min/_cm.json) + +___ +### Script output +`cmr "get sys-utils-min " -j` +#### New environment keys (filter) + +* `+PATH` +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-xilinx-sdk.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-xilinx-sdk.md new file mode 100644 index 000000000..aa2f7dba5 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-xilinx-sdk.md @@ -0,0 +1,138 @@ +Automatically generated README for this automation recipe: **get-xilinx-sdk** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-xilinx-sdk,76d4d1bd09df4490) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-xilinx-sdk)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,xilinx,sdk* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get xilinx sdk" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,xilinx,sdk` + +`cm run script --tags=get,xilinx,sdk [--input_flags]` + +*or* + +`cmr "get xilinx sdk"` + +`cmr "get xilinx sdk " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,xilinx,sdk' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,xilinx,sdk"``` + +#### Run this script via Docker (beta) + +`cm docker script "get xilinx sdk" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--input=value` → `CM_XILINX_SDK_FILE_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "input":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `2019.1` + +* `2019.1` +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-xilinx-sdk/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-xilinx-sdk/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-xilinx-sdk/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-xilinx-sdk/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-xilinx-sdk/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-xilinx-sdk/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-xilinx-sdk/_cm.json) + +___ +### Script output +`cmr "get xilinx sdk " [--input_flags] -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_XILINX_*` +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-zendnn.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-zendnn.md new file mode 100644 index 000000000..acf66bf02 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/get-zendnn.md @@ -0,0 +1,127 @@ +Automatically generated README for this automation recipe: **get-zendnn** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-zendnn,d1c6feb0ee684b09) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,zendnn,amd,from.src* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get zendnn amd from.src" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,zendnn,amd,from.src` + +`cm run script --tags=get,zendnn,amd,from.src ` + +*or* + +`cmr "get zendnn amd from.src"` + +`cmr "get zendnn amd from.src " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,zendnn,amd,from.src' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,zendnn,amd,from.src"``` + +#### Run this script via Docker (beta) + +`cm docker script "get zendnn amd from.src" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn/_cm.json)*** + * get,amd,aocl + * CM names: `--adr.['aocl']...` + - CM script: [get-aocl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-aocl) + * get,lib,blis,_amd + - CM script: [get-blis](https://github.com/mlcommons/cm4mlops/tree/master/script/get-blis) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,git,_repo.https://github.com/amd/ZenDNN.git + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn/_cm.json) + +___ +### Script output +`cmr "get zendnn amd from.src " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/install-bazel.md b/docs/Detection-or-installation-of-tools-and-artifacts/install-bazel.md new file mode 100644 index 000000000..8787010ef --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/install-bazel.md @@ -0,0 +1,134 @@ +Automatically generated README for this automation recipe: **install-bazel** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-bazel,dfd3d2bf5b764175) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,script,bazel* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install script bazel" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,script,bazel` + +`cm run script --tags=install,script,bazel ` + +*or* + +`cmr "install script bazel"` + +`cmr "install script bazel " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'install,script,bazel', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,script,bazel"``` + +#### Run this script via Docker (beta) + +`cm docker script "install script bazel" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `7.0.2` + +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel/_cm.json) + 1. ***Run native script if exists*** + * [run-aarch64.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel/run-aarch64.sh) + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel/_cm.json) + 1. Run "postprocess" function from customize.py + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel/_cm.json)*** + * get,bazel + * Skip this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [get-bazel](https://github.com/mlcommons/cm4mlops/tree/master/script/get-bazel) + +___ +### Script output +`cmr "install script bazel " -j` +#### New environment keys (filter) + +* `CM_BAZEL_*` +#### New environment keys auto-detected from customize + +* `CM_BAZEL_BIN_WITH_PATH` +* `CM_BAZEL_DOWNLOAD_FILE` +* `CM_BAZEL_DOWNLOAD_URL` +* `CM_BAZEL_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/install-cmake-prebuilt.md b/docs/Detection-or-installation-of-tools-and-artifacts/install-cmake-prebuilt.md new file mode 100644 index 000000000..af835b71b --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/install-cmake-prebuilt.md @@ -0,0 +1,136 @@ +Automatically generated README for this automation recipe: **install-cmake-prebuilt** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-cmake-prebuilt,5a39ef05992b4103) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cmake-prebuilt)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,prebuilt,cmake,prebuilt-cmake,install-prebuilt-cmake* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install prebuilt cmake prebuilt-cmake install-prebuilt-cmake" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,prebuilt,cmake,prebuilt-cmake,install-prebuilt-cmake` + +`cm run script --tags=install,prebuilt,cmake,prebuilt-cmake,install-prebuilt-cmake ` + +*or* + +`cmr "install prebuilt cmake prebuilt-cmake install-prebuilt-cmake"` + +`cmr "install prebuilt cmake prebuilt-cmake install-prebuilt-cmake " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'install,prebuilt,cmake,prebuilt-cmake,install-prebuilt-cmake', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,prebuilt,cmake,prebuilt-cmake,install-prebuilt-cmake"``` + +#### Run this script via Docker (beta) + +`cm docker script "install prebuilt cmake prebuilt-cmake install-prebuilt-cmake" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `3.28.3` + +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cmake-prebuilt/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cmake-prebuilt/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cmake-prebuilt/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cmake-prebuilt/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cmake-prebuilt/_cm.json) + 1. Run "postprocess" function from customize.py + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cmake-prebuilt/_cm.json)*** + * get,cmake + * Skip this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + +___ +### Script output +`cmr "install prebuilt cmake prebuilt-cmake install-prebuilt-cmake " -j` +#### New environment keys (filter) + +* `+C_INCLUDE_PATH` +* `+LD_LIBRARY_PATH` +* `+PATH` +* `CM_CMAKE_*` +* `CM_GET_DEPENDENT_CACHED_PATH` +#### New environment keys auto-detected from customize + +* `CM_CMAKE_BIN_WITH_PATH` +* `CM_CMAKE_INSTALLED_PATH` +* `CM_CMAKE_PACKAGE` +* `CM_GET_DEPENDENT_CACHED_PATH` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/install-gflags.md b/docs/Detection-or-installation-of-tools-and-artifacts/install-gflags.md new file mode 100644 index 000000000..dc4ab3c75 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/install-gflags.md @@ -0,0 +1,127 @@ +Automatically generated README for this automation recipe: **install-gflags** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-gflags,10bb562c29ea459e) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gflags)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,src,get,gflags* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install src get gflags" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,src,get,gflags` + +`cm run script --tags=install,src,get,gflags ` + +*or* + +`cmr "install src get gflags"` + +`cmr "install src get gflags " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'install,src,get,gflags', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,src,get,gflags"``` + +#### Run this script via Docker (beta) + +`cm docker script "install src get gflags" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `2.2.2` + +* `2.2.2` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gflags/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gflags/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gflags/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gflags/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gflags/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gflags/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gflags/_cm.json) + +___ +### Script output +`cmr "install src get gflags " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/install-github-cli.md b/docs/Detection-or-installation-of-tools-and-artifacts/install-github-cli.md new file mode 100644 index 000000000..65cfb01cb --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/install-github-cli.md @@ -0,0 +1,121 @@ +Automatically generated README for this automation recipe: **install-github-cli** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-github-cli,cd948ec309344bf8) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,gh,github,cli,github-cli* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install gh github cli github-cli" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,gh,github,cli,github-cli` + +`cm run script --tags=install,gh,github,cli,github-cli ` + +*or* + +`cmr "install gh github cli github-cli"` + +`cmr "install gh github cli github-cli " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'install,gh,github,cli,github-cli', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,gh,github,cli,github-cli"``` + +#### Run this script via Docker (beta) + +`cm docker script "install gh github cli github-cli" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli/_cm.json) + 1. ***Run native script if exists*** + * [run-macos.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli/run-macos.sh) + * [run-rhel.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli/run-rhel.sh) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli/_cm.json) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli/_cm.json) + +___ +### Script output +`cmr "install gh github cli github-cli " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/install-numactl-from-src.md b/docs/Detection-or-installation-of-tools-and-artifacts/install-numactl-from-src.md new file mode 100644 index 000000000..dd69d6889 --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/install-numactl-from-src.md @@ -0,0 +1,170 @@ +Automatically generated README for this automation recipe: **install-numactl-from-src** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-numactl-from-src,4f355ae8ca1948b2) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-numactl-from-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,src,from.src,numactl,src-numactl* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install src from.src numactl src-numactl" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,src,from.src,numactl,src-numactl` + +`cm run script --tags=install,src,from.src,numactl,src-numactl[,variations] ` + +*or* + +`cmr "install src from.src numactl src-numactl"` + +`cmr "install src from.src numactl src-numactl [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'install,src,from.src,numactl,src-numactl', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,src,from.src,numactl,src-numactl"``` + +#### Run this script via Docker (beta) + +`cm docker script "install src from.src numactl src-numactl[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + * `_sha.#` + - Environment variables: + - *CM_GIT_CHECKOUT_SHA*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ + + * Group "**repo**" +
+ Click here to expand this section. + + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + * **`_repo.https://github.com/numactl/numactl`** (default) + - Environment variables: + - *CM_GIT_URL*: `https://github.com/numactl/numactl` + - Workflow: + +
+ + +#### Default variations + +`_repo.https://github.com/numactl/numactl` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-numactl-from-src/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,git,repo + * CM names: `--adr.['numactl-src-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-numactl-from-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-numactl-from-src/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-numactl-from-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-numactl-from-src/_cm.json) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-numactl-from-src/_cm.json) + +___ +### Script output +`cmr "install src from.src numactl src-numactl [,variations]" -j` +#### New environment keys (filter) + +* `+PATH` +* `CM_NUMACTL_*` +#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/install-openssl.md b/docs/Detection-or-installation-of-tools-and-artifacts/install-openssl.md new file mode 100644 index 000000000..7aec6efce --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/install-openssl.md @@ -0,0 +1,134 @@ +Automatically generated README for this automation recipe: **install-openssl** + +Category: **Detection or installation of tools and artifacts** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-openssl,be472d3b1d014169) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-openssl)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,src,openssl,openssl-lib* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install src openssl openssl-lib" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,src,openssl,openssl-lib` + +`cm run script --tags=install,src,openssl,openssl-lib ` + +*or* + +`cmr "install src openssl openssl-lib"` + +`cmr "install src openssl openssl-lib " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'install,src,openssl,openssl-lib', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,src,openssl,openssl-lib"``` + +#### Run this script via Docker (beta) + +`cm docker script "install src openssl openssl-lib" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `1.1.1` + +* `1.1.1` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-openssl/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-openssl/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-openssl/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-openssl/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-openssl/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-openssl/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-openssl/_cm.json)*** + * get,openssl + * Skip this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [get-openssl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-openssl) + +___ +### Script output +`cmr "install src openssl openssl-lib " -j` +#### New environment keys (filter) + +* `+LD_LIBRARY_PATH` +* `CM_OPENSSL_*` +#### New environment keys auto-detected from customize + +* `CM_OPENSSL_BIN_WITH_PATH` +* `CM_OPENSSL_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/DevOps-automation/benchmark-program.md b/docs/DevOps-automation/benchmark-program.md new file mode 100644 index 000000000..cd0bbeba3 --- /dev/null +++ b/docs/DevOps-automation/benchmark-program.md @@ -0,0 +1,151 @@ +Automatically generated README for this automation recipe: **benchmark-program** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=benchmark-program,19f369ef47084895) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *benchmark,program* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "benchmark program" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=benchmark,program` + +`cm run script --tags=benchmark,program[,variations] ` + +*or* + +`cmr "benchmark program"` + +`cmr "benchmark program [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'benchmark,program', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="benchmark,program"``` + +#### Run this script via Docker (beta) + +`cm docker script "benchmark program[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_numactl` + - Workflow: + * `_numactl-interleave` + - Workflow: + * `_profile` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,profiler + - *Warning: no scripts found* + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_ENABLE_NUMACTL: `0` +* CM_ENABLE_PROFILING: `0` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/_cm.json)*** + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * set,performance,mode,_performance + * Enable this dependency only if all ENV vars are set:
+`{'CM_SET_PERFORMANCE_MODE': ['on', 'yes', 'True', True]}` + - CM script: [set-performance-mode](https://github.com/mlcommons/cm4mlops/tree/master/script/set-performance-mode) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/_cm.json) + 1. ***Run native script if exists*** + * [run-ubuntu.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/run-ubuntu.sh) + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/_cm.json) + +___ +### Script output +`cmr "benchmark program [,variations]" -j` +#### New environment keys (filter) + +* `CM_RUN_CMD` +#### New environment keys auto-detected from customize + +* `CM_RUN_CMD` \ No newline at end of file diff --git a/docs/DevOps-automation/compile-program.md b/docs/DevOps-automation/compile-program.md new file mode 100644 index 000000000..057fdfe98 --- /dev/null +++ b/docs/DevOps-automation/compile-program.md @@ -0,0 +1,128 @@ +Automatically generated README for this automation recipe: **compile-program** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=compile-program,c05042ba005a4bfa) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *compile,program,c-program,cpp-program,compile-program,compile-c-program,compile-cpp-program* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "compile program c-program cpp-program compile-program compile-c-program compile-cpp-program" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=compile,program,c-program,cpp-program,compile-program,compile-c-program,compile-cpp-program` + +`cm run script --tags=compile,program,c-program,cpp-program,compile-program,compile-c-program,compile-cpp-program ` + +*or* + +`cmr "compile program c-program cpp-program compile-program compile-c-program compile-cpp-program"` + +`cmr "compile program c-program cpp-program compile-program compile-c-program compile-cpp-program " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'compile,program,c-program,cpp-program,compile-program,compile-c-program,compile-cpp-program', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="compile,program,c-program,cpp-program,compile-program,compile-c-program,compile-cpp-program"``` + +#### Run this script via Docker (beta) + +`cm docker script "compile program c-program cpp-program compile-program compile-c-program compile-cpp-program" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* SKIP_RECOMPILE: `no` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program/_cm.json)*** + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,compiler + * CM names: `--adr.['compiler']...` + - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) + * get,compiler-flags + - CM script: [get-compiler-flags](https://github.com/mlcommons/cm4mlops/tree/master/script/get-compiler-flags) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program/_cm.json) + +___ +### Script output +`cmr "compile program c-program cpp-program compile-program compile-c-program compile-cpp-program " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/convert-csv-to-md.md b/docs/DevOps-automation/convert-csv-to-md.md new file mode 100644 index 000000000..129d8588b --- /dev/null +++ b/docs/DevOps-automation/convert-csv-to-md.md @@ -0,0 +1,143 @@ +Automatically generated README for this automation recipe: **convert-csv-to-md** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=convert-csv-to-md,200a95b80bee4a25) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *csv-to-md,convert,to-md,from-csv* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "csv-to-md convert to-md from-csv" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=csv-to-md,convert,to-md,from-csv` + +`cm run script --tags=csv-to-md,convert,to-md,from-csv [--input_flags]` + +*or* + +`cmr "csv-to-md convert to-md from-csv"` + +`cmr "csv-to-md convert to-md from-csv " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'csv-to-md,convert,to-md,from-csv', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="csv-to-md,convert,to-md,from-csv"``` + +#### Run this script via Docker (beta) + +`cm docker script "csv-to-md convert to-md from-csv" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--csv_file=value` → `CM_CSV_FILE=value` +* `--md_file=value` → `CM_MD_FILE=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "csv_file":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md/_cm.json)*** + * get,python3 + * CM names: `--adr.['python, python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_pandas + * CM names: `--adr.['pandas']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.tabulate + * CM names: `--adr.['tabulate']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md/_cm.json) + +___ +### Script output +`cmr "csv-to-md convert to-md from-csv " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/copy-to-clipboard.md b/docs/DevOps-automation/copy-to-clipboard.md new file mode 100644 index 000000000..7122a24c0 --- /dev/null +++ b/docs/DevOps-automation/copy-to-clipboard.md @@ -0,0 +1,141 @@ +Automatically generated README for this automation recipe: **copy-to-clipboard** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=copy-to-clipboard,8b3aaa97ce58474d) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/copy-to-clipboard)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *copy,to,clipboard,copy-to-clipboard* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "copy to clipboard copy-to-clipboard" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=copy,to,clipboard,copy-to-clipboard` + +`cm run script --tags=copy,to,clipboard,copy-to-clipboard [--input_flags]` + +*or* + +`cmr "copy to clipboard copy-to-clipboard"` + +`cmr "copy to clipboard copy-to-clipboard " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'copy,to,clipboard,copy-to-clipboard', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="copy,to,clipboard,copy-to-clipboard"``` + +#### Run this script via Docker (beta) + +`cm docker script "copy to clipboard copy-to-clipboard" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--add_quotes=value` → `CM_COPY_TO_CLIPBOARD_TEXT_ADD_QUOTES=value` +* `--q=value` → `CM_COPY_TO_CLIPBOARD_TEXT_ADD_QUOTES=value` +* `--t=value` → `CM_COPY_TO_CLIPBOARD_TEXT=value` +* `--text=value` → `CM_COPY_TO_CLIPBOARD_TEXT=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "add_quotes":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/copy-to-clipboard/_cm.yaml)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_package.pyperclip + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/copy-to-clipboard/_cm.yaml) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/copy-to-clipboard/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/copy-to-clipboard/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/copy-to-clipboard/_cm.yaml) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/copy-to-clipboard/_cm.yaml) + +___ +### Script output +`cmr "copy to clipboard copy-to-clipboard " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/create-conda-env.md b/docs/DevOps-automation/create-conda-env.md new file mode 100644 index 000000000..f97bb0f5a --- /dev/null +++ b/docs/DevOps-automation/create-conda-env.md @@ -0,0 +1,148 @@ +Automatically generated README for this automation recipe: **create-conda-env** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=create-conda-env,e39e0b04c86a40f2) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-conda-env)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *create,get,env,conda-env,conda-environment,create-conda-environment* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "create get env conda-env conda-environment create-conda-environment" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=create,get,env,conda-env,conda-environment,create-conda-environment` + +`cm run script --tags=create,get,env,conda-env,conda-environment,create-conda-environment[,variations] ` + +*or* + +`cmr "create get env conda-env conda-environment create-conda-environment"` + +`cmr "create get env conda-env conda-environment create-conda-environment [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'create,get,env,conda-env,conda-environment,create-conda-environment', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="create,get,env,conda-env,conda-environment,create-conda-environment"``` + +#### Run this script via Docker (beta) + +`cm docker script "create get env conda-env conda-environment create-conda-environment[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_name.#` + - Environment variables: + - *CM_CONDA_ENV_NAME*: `#` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-conda-env/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,conda + * CM names: `--adr.['conda']...` + - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-conda-env/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-conda-env/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-conda-env/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-conda-env/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-conda-env/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-conda-env/_cm.json) + +___ +### Script output +`cmr "create get env conda-env conda-environment create-conda-environment [,variations]" -j` +#### New environment keys (filter) + +* `+LD_LIBRARY_PATH` +* `+PATH` +* `CM_CONDA_BIN_PATH` +* `CM_CONDA_LIB_PATH` +* `CM_CONDA_PREFIX` +* `CONDA_PREFIX` +#### New environment keys auto-detected from customize + +* `CM_CONDA_BIN_PATH` +* `CM_CONDA_LIB_PATH` +* `CM_CONDA_PREFIX` \ No newline at end of file diff --git a/docs/DevOps-automation/create-patch.md b/docs/DevOps-automation/create-patch.md new file mode 100644 index 000000000..664c378f5 --- /dev/null +++ b/docs/DevOps-automation/create-patch.md @@ -0,0 +1,135 @@ +Automatically generated README for this automation recipe: **create-patch** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=create-patch,0659dc1f75664c65) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-patch)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *create,patch* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "create patch" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=create,patch` + +`cm run script --tags=create,patch [--input_flags]` + +*or* + +`cmr "create patch"` + +`cmr "create patch " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'create,patch', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="create,patch"``` + +#### Run this script via Docker (beta) + +`cm docker script "create patch" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--exclude=value` → `CM_CREATE_PATCH_EXCLUDE=value` +* `--new=value` → `CM_CREATE_PATCH_NEW=value` +* `--old=value` → `CM_CREATE_PATCH_OLD=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "exclude":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-patch/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-patch/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-patch/_cm.yaml) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-patch/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-patch/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-patch/_cm.yaml) + +___ +### Script output +`cmr "create patch " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/detect-sudo.md b/docs/DevOps-automation/detect-sudo.md new file mode 100644 index 000000000..49e48b530 --- /dev/null +++ b/docs/DevOps-automation/detect-sudo.md @@ -0,0 +1,120 @@ +Automatically generated README for this automation recipe: **detect-sudo** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=detect-sudo,1d47ffc556e248dc) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-sudo)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *detect,sudo,access* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "detect sudo access" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=detect,sudo,access` + +`cm run script --tags=detect,sudo,access ` + +*or* + +`cmr "detect sudo access"` + +`cmr "detect sudo access " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'detect,sudo,access', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="detect,sudo,access"``` + +#### Run this script via Docker (beta) + +`cm docker script "detect sudo access" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-sudo/_cm.yaml) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-sudo/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-sudo/_cm.yaml) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-sudo/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-sudo/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-sudo/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-sudo/_cm.yaml) + +___ +### Script output +`cmr "detect sudo access " -j` +#### New environment keys (filter) + +* `CM_SUDO_*` +#### New environment keys auto-detected from customize + +* `CM_SUDO_USER` \ No newline at end of file diff --git a/docs/DevOps-automation/download-and-extract.md b/docs/DevOps-automation/download-and-extract.md new file mode 100644 index 000000000..1d802285e --- /dev/null +++ b/docs/DevOps-automation/download-and-extract.md @@ -0,0 +1,216 @@ +Automatically generated README for this automation recipe: **download-and-extract** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=download-and-extract,c67e81a4ce2649f5) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-and-extract)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *download-and-extract,file* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "download-and-extract file" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=download-and-extract,file` + +`cm run script --tags=download-and-extract,file[,variations] [--input_flags]` + +*or* + +`cmr "download-and-extract file"` + +`cmr "download-and-extract file [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'download-and-extract,file', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="download-and-extract,file"``` + +#### Run this script via Docker (beta) + +`cm docker script "download-and-extract file[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_extract` + - Environment variables: + - *CM_DAE_EXTRACT_DOWNLOADED*: `yes` + - Workflow: + * `_keep` + - Environment variables: + - *CM_EXTRACT_REMOVE_EXTRACTED*: `no` + - Workflow: + * `_no-remove-extracted` + - Environment variables: + - *CM_EXTRACT_REMOVE_EXTRACTED*: `no` + - Workflow: + * `_url.#` + - Environment variables: + - *CM_DAE_URL*: `#` + - Workflow: + +
+ + + * Group "**download-tool**" +
+ Click here to expand this section. + + * **`_cmutil`** (default) + - Workflow: + * `_curl` + - Workflow: + * `_gdown` + - Workflow: + * `_rclone` + - Workflow: + * `_torrent` + - Environment variables: + - *CM_DAE_DOWNLOAD_USING_TORRENT*: `yes` + - *CM_TORRENT_DOWNLOADED_FILE_NAME*: `<<>>` + - *CM_TORRENT_DOWNLOADED_PATH_ENV_KEY*: `CM_DAE_FILEPATH` + - *CM_TORRENT_WAIT_UNTIL_COMPLETED*: `yes` + - Workflow: + 1. ***Read "prehook_deps" on other CM scripts*** + * download,torrent + - CM script: [download-torrent](https://github.com/mlcommons/cm4mlops/tree/master/script/download-torrent) + * `_wget` + - Workflow: + +
+ + +#### Default variations + +`_cmutil` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--download_path=value` → `CM_DOWNLOAD_PATH=value` +* `--extra_folder=value` → `CM_EXTRACT_TO_FOLDER=value` +* `--extract_path=value` → `CM_EXTRACT_PATH=value` +* `--from=value` → `CM_DOWNLOAD_LOCAL_FILE_PATH=value` +* `--local_path=value` → `CM_DOWNLOAD_LOCAL_FILE_PATH=value` +* `--store=value` → `CM_DOWNLOAD_PATH=value` +* `--to=value` → `CM_EXTRACT_PATH=value` +* `--url=value` → `CM_DAE_URL=value` +* `--verify=value` → `CM_VERIFY_SSL=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "download_path":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-and-extract/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-and-extract/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-and-extract/_cm.json)*** + * download,file + * Skip this dependency only if all ENV vars are set:
+`{'CM_DAE_DOWNLOAD_USING_TORRENT': ['yes', 'True']}` + * CM names: `--adr.['download-script']...` + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + 1. ***Run native script if exists*** + 1. ***Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-and-extract/_cm.json)*** + * extract,file + * Enable this dependency only if all ENV vars are set:
+`{'CM_DAE_EXTRACT_DOWNLOADED': ['yes', 'True']}` + * CM names: `--adr.['extract-script']...` + - CM script: [extract-file](https://github.com/mlcommons/cm4mlops/tree/master/script/extract-file) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-and-extract/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-and-extract/_cm.json) + +___ +### Script output +`cmr "download-and-extract file [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `<<>>` +* `<<>>` +* `CM_DOWNLOAD_DOWNLOADED_PATH*` +* `CM_EXTRACT_EXTRACTED_PATH` +* `CM_GET_DEPENDENT_CACHED_PATH` +#### New environment keys auto-detected from customize + +* `CM_GET_DEPENDENT_CACHED_PATH` \ No newline at end of file diff --git a/docs/DevOps-automation/download-file.md b/docs/DevOps-automation/download-file.md new file mode 100644 index 000000000..73df26d09 --- /dev/null +++ b/docs/DevOps-automation/download-file.md @@ -0,0 +1,202 @@ +Automatically generated README for this automation recipe: **download-file** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=download-file,9cdc8dc41aae437e) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *download,file* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "download file" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=download,file` + +`cm run script --tags=download,file[,variations] [--input_flags]` + +*or* + +`cmr "download file"` + +`cmr "download file [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'download,file', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="download,file"``` + +#### Run this script via Docker (beta) + +`cm docker script "download file[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_url.#` + - Environment variables: + - *CM_DOWNLOAD_URL*: `#` + - Workflow: + +
+ + + * Group "**download-tool**" +
+ Click here to expand this section. + + * **`_cmutil`** (default) + - Environment variables: + - *CM_DOWNLOAD_TOOL*: `cmutil` + - Workflow: + * `_curl` + - Environment variables: + - *CM_DOWNLOAD_TOOL*: `curl` + - Workflow: + * `_gdown` + - Environment variables: + - *CM_DOWNLOAD_TOOL*: `gdown` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_package.gdown + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_rclone` + - Environment variables: + - *CM_DOWNLOAD_TOOL*: `rclone` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,rclone + - CM script: [get-rclone](https://github.com/mlcommons/cm4mlops/tree/master/script/get-rclone) + * `_wget` + - Environment variables: + - *CM_DOWNLOAD_TOOL*: `wget` + - Workflow: + +
+ + +#### Default variations + +`_cmutil` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--download_path=value` → `CM_DOWNLOAD_PATH=value` +* `--from=value` → `CM_DOWNLOAD_LOCAL_FILE_PATH=value` +* `--local_path=value` → `CM_DOWNLOAD_LOCAL_FILE_PATH=value` +* `--md5sum=value` → `CM_DOWNLOAD_CHECKSUM=value` +* `--store=value` → `CM_DOWNLOAD_PATH=value` +* `--url=value` → `CM_DOWNLOAD_URL=value` +* `--verify=value` → `CM_VERIFY_SSL=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "download_path":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_RCLONE_COPY_USING: `sync` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file/_cm.json) + +___ +### Script output +`cmr "download file [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `<<>>` +* `CM_DOWNLOAD_DOWNLOADED_PATH` +* `CM_GET_DEPENDENT_CACHED_PATH` +#### New environment keys auto-detected from customize + +* `CM_DOWNLOAD_DOWNLOADED_PATH` +* `CM_GET_DEPENDENT_CACHED_PATH` \ No newline at end of file diff --git a/docs/DevOps-automation/download-torrent.md b/docs/DevOps-automation/download-torrent.md new file mode 100644 index 000000000..e14037e1d --- /dev/null +++ b/docs/DevOps-automation/download-torrent.md @@ -0,0 +1,155 @@ +Automatically generated README for this automation recipe: **download-torrent** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=download-torrent,69b752c5618e45bb) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-torrent)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *download,torrent,download-torrent* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "download torrent download-torrent" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=download,torrent,download-torrent` + +`cm run script --tags=download,torrent,download-torrent[,variations] [--input_flags]` + +*or* + +`cmr "download torrent download-torrent"` + +`cmr "download torrent download-torrent [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'download,torrent,download-torrent', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="download,torrent,download-torrent"``` + +#### Run this script via Docker (beta) + +`cm docker script "download torrent download-torrent[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_torrent.#` + - Environment variables: + - *CM_TORRENT_FILE*: `#` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--wait=value` → `CM_TORRENT_WAIT_UNTIL_COMPLETED=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "wait":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_TORRENT_WAIT_UNTIL_COMPLETED: `no` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-torrent/_cm.json)*** + * get,generic-sys-util,_transmission + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-torrent/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-torrent/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-torrent/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-torrent/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-torrent/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-torrent/_cm.json) + +___ +### Script output +`cmr "download torrent download-torrent [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `<<>>` +* `CM_TORRENT_DOWNLOADED_PATH` +#### New environment keys auto-detected from customize + +* `CM_TORRENT_DOWNLOADED_PATH` \ No newline at end of file diff --git a/docs/DevOps-automation/extract-file.md b/docs/DevOps-automation/extract-file.md new file mode 100644 index 000000000..a9df0d22e --- /dev/null +++ b/docs/DevOps-automation/extract-file.md @@ -0,0 +1,168 @@ +Automatically generated README for this automation recipe: **extract-file** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=extract-file,3f0b76219d004817) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *extract,file* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "extract file" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=extract,file` + +`cm run script --tags=extract,file[,variations] [--input_flags]` + +*or* + +`cmr "extract file"` + +`cmr "extract file [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'extract,file', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="extract,file"``` + +#### Run this script via Docker (beta) + +`cm docker script "extract file[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_keep` + - Environment variables: + - *CM_EXTRACT_REMOVE_EXTRACTED*: `no` + - Workflow: + * `_no-remove-extracted` + - Environment variables: + - *CM_EXTRACT_REMOVE_EXTRACTED*: `no` + - Workflow: + * `_path.#` + - Environment variables: + - *CM_EXTRACT_FILEPATH*: `#` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--extra_folder=value` → `CM_EXTRACT_TO_FOLDER=value` +* `--extract_path=value` → `CM_EXTRACT_PATH=value` +* `--input=value` → `CM_EXTRACT_FILEPATH=value` +* `--to=value` → `CM_EXTRACT_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "extra_folder":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file/_cm.json) + +___ +### Script output +`cmr "extract file [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `<<>>` +* `CM_EXTRACT_EXTRACTED_PATH` +* `CM_GET_DEPENDENT_CACHED_PATH` +#### New environment keys auto-detected from customize + +* `CM_EXTRACT_EXTRACTED_PATH` +* `CM_GET_DEPENDENT_CACHED_PATH` \ No newline at end of file diff --git a/docs/DevOps-automation/fail.md b/docs/DevOps-automation/fail.md new file mode 100644 index 000000000..6784dbba7 --- /dev/null +++ b/docs/DevOps-automation/fail.md @@ -0,0 +1,132 @@ +Automatically generated README for this automation recipe: **fail** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=fail,3aaee82e19d243cd) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/fail)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *fail,filter* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "fail filter" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=fail,filter` + +`cm run script --tags=fail,filter[,variations] ` + +*or* + +`cmr "fail filter"` + +`cmr "fail filter [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'fail,filter', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="fail,filter"``` + +#### Run this script via Docker (beta) + +`cm docker script "fail filter[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_windows` + - Environment variables: + - *CM_FAIL_WINDOWS*: `True` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/fail/_cm.yaml) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/fail/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/fail/_cm.yaml) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/fail/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/fail/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/fail/_cm.yaml) + +___ +### Script output +`cmr "fail filter [,variations]" -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/get-conda.md b/docs/DevOps-automation/get-conda.md new file mode 100644 index 000000000..6999e6a00 --- /dev/null +++ b/docs/DevOps-automation/get-conda.md @@ -0,0 +1,164 @@ +Automatically generated README for this automation recipe: **get-conda** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-conda,6600115f41324c7b) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta 
description): *get,conda,get-conda* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get conda get-conda" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,conda,get-conda` + +`cm run script --tags=get,conda,get-conda[,variations] ` + +*or* + +`cmr "get conda get-conda"` + +`cmr "get conda get-conda [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,conda,get-conda', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,conda,get-conda"``` + +#### Run this script via Docker (beta) + +`cm docker script "get conda get-conda[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_name.#` + - Environment variables: + - *CM_CONDA_PREFIX_NAME*: `#` + - Workflow: + +
+ + + * Group "**conda-python**" +
+ Click here to expand this section. + + * `_python-3.#` + - Environment variables: + - *CM_CONDA_PYTHON_VERSION*: `3.#` + - Workflow: + * `_python-3.8` + - Environment variables: + - *CM_CONDA_PYTHON_VERSION*: `3.8` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda/_cm.json) + +___ +### Script output +`cmr "get conda get-conda [,variations]" -j` +#### New environment keys (filter) + +* `+LD_LIBRARY_PATH` +* `+PATH` +* `CM_CONDA_BIN_PATH` +* `CM_CONDA_BIN_WITH_PATH` +* `CM_CONDA_LIB_PATH` +* `CM_CONDA_PREFIX` +* `CONDA_PREFIX` +#### New environment keys auto-detected from customize + +* `CM_CONDA_BIN_PATH` +* `CM_CONDA_BIN_WITH_PATH` +* `CM_CONDA_LIB_PATH` +* `CM_CONDA_PREFIX` \ No newline at end of file diff --git a/docs/DevOps-automation/get-git-repo.md b/docs/DevOps-automation/get-git-repo.md new file mode 100644 index 000000000..17ecb4b5b --- /dev/null +++ b/docs/DevOps-automation/get-git-repo.md @@ -0,0 +1,240 @@ +Automatically generated README for this automation recipe: **get-git-repo** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-git-repo,ed603e7292974f10) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,git,repo,repository,clone* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get git repo repository clone" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,git,repo,repository,clone` + +`cm run script --tags=get,git,repo,repository,clone[,variations] [--input_flags]` + +*or* + +`cmr "get git repo repository clone"` + +`cmr "get git repo repository clone [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,git,repo,repository,clone', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,git,repo,repository,clone"``` + +#### Run this script via Docker (beta) + +`cm docker script "get git repo repository clone[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_lfs` + - Environment variables: + - *CM_GIT_REPO_NEEDS_LFS*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic,sys-util,_git-lfs + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * `_no-recurse-submodules` + - Environment variables: + - *CM_GIT_RECURSE_SUBMODULES*: `` + - Workflow: + * `_patch` + - Environment variables: + - *CM_GIT_PATCH*: `yes` + - Workflow: + * `_submodules.#` + - Environment variables: + - *CM_GIT_SUBMODULES*: `#` + - Workflow: + +
+ + + * Group "**checkout**" +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_BRANCH*: `#` + - Workflow: + * `_sha.#` + - Environment variables: + - *CM_GIT_SHA*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ + + * Group "**git-history**" +
+ Click here to expand this section. + + * `_full-history` + - Environment variables: + - *CM_GIT_DEPTH*: `` + - Workflow: + * **`_short-history`** (default) + - Environment variables: + - *CM_GIT_DEPTH*: `--depth 5` + - Workflow: + +
+ + + * Group "**repo**" +
+ Click here to expand this section. + + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + +
+ + +#### Default variations + +`_short-history` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--branch=value` → `CM_GIT_CHECKOUT=value` +* `--depth=value` → `CM_GIT_DEPTH=value` +* `--env_key=value` → `CM_GIT_ENV_KEY=value` +* `--folder=value` → `CM_GIT_CHECKOUT_FOLDER=value` +* `--patch=value` → `CM_GIT_PATCH=value` +* `--submodules=value` → `CM_GIT_RECURSE_SUBMODULES=value` +* `--update=value` → `CM_GIT_REPO_PULL=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "branch":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_DEPTH: `--depth 4` +* CM_GIT_CHECKOUT_FOLDER: `repo` +* CM_GIT_PATCH: `no` +* CM_GIT_RECURSE_SUBMODULES: ` --recurse-submodules` +* CM_GIT_URL: `https://github.com/mlcommons/ck.git` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo/_cm.json)*** + * pull,git,repo + * Enable this dependency only if all ENV vars are set:
+`{'CM_GIT_REPO_PULL': ['yes', 'True']}` + * CM names: `--adr.['pull-git-repo']...` + - CM script: [pull-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/pull-git-repo) + +___ +### Script output +`cmr "get git repo repository clone [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `<<>>` +* `CM_GIT_CHECKOUT_PATH` +* `CM_GIT_REPO_*` +#### New environment keys auto-detected from customize + +* `CM_GIT_CHECKOUT_PATH` +* `CM_GIT_REPO_CURRENT_HASH` \ No newline at end of file diff --git a/docs/DevOps-automation/get-github-cli.md b/docs/DevOps-automation/get-github-cli.md new file mode 100644 index 000000000..fee40cc4e --- /dev/null +++ b/docs/DevOps-automation/get-github-cli.md @@ -0,0 +1,120 @@ +Automatically generated README for this automation recipe: **get-github-cli** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-github-cli,1417029c6ff44f21) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,gh,gh-cli,github,cli,github-cli* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get gh gh-cli github cli github-cli" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,gh,gh-cli,github,cli,github-cli` + +`cm run script --tags=get,gh,gh-cli,github,cli,github-cli ` + +*or* + +`cmr "get gh gh-cli github cli github-cli"` + +`cmr "get gh gh-cli github cli github-cli " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'get,gh,gh-cli,github,cli,github-cli' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,gh,gh-cli,github,cli,github-cli"``` + +#### Run this script via Docker (beta) + +`cm docker script "get gh gh-cli github cli github-cli" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli/_cm.json) + +___ +### Script output +`cmr "get gh gh-cli github cli github-cli " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/pull-git-repo.md b/docs/DevOps-automation/pull-git-repo.md new file mode 100644 index 000000000..63b1e3157 --- /dev/null +++ b/docs/DevOps-automation/pull-git-repo.md @@ -0,0 +1,134 @@ +Automatically generated README for this automation recipe: **pull-git-repo** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=pull-git-repo,c23132ed65c4421d) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/pull-git-repo)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *pull,git,repo,repository* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "pull git repo repository" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=pull,git,repo,repository` + +`cm run script --tags=pull,git,repo,repository [--input_flags]` + +*or* + +`cmr "pull git repo repository"` + +`cmr "pull git repo repository " [--input_flags]` + + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'pull,git,repo,repository',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="pull,git,repo,repository"``` + +#### Run this script via Docker (beta) + +`cm docker script "pull git repo repository" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+<summary>Click here to expand this section.</summary>
+
+* `--path=value` → `CM_GIT_CHECKOUT_PATH=value`
+
+**Above CLI flags can be used in the Python CM API as follows:**
+
+```python
+r=cm.access({... , "path":...})
+```
+
+</details>
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/pull-git-repo/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/pull-git-repo/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/pull-git-repo/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/pull-git-repo/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/pull-git-repo/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/pull-git-repo/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/pull-git-repo/_cm.json) + +___ +### Script output +`cmr "pull git repo repository " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/push-csv-to-spreadsheet.md b/docs/DevOps-automation/push-csv-to-spreadsheet.md new file mode 100644 index 000000000..124332bbc --- /dev/null +++ b/docs/DevOps-automation/push-csv-to-spreadsheet.md @@ -0,0 +1,142 @@ +Automatically generated README for this automation recipe: **push-csv-to-spreadsheet** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=push-csv-to-spreadsheet,5ec9e5fa7feb4fff) ]* + +--- +#### 
Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-csv-to-spreadsheet)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *push,google-spreadsheet,spreadsheet,push-to-google-spreadsheet* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "push google-spreadsheet spreadsheet push-to-google-spreadsheet" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=push,google-spreadsheet,spreadsheet,push-to-google-spreadsheet` + +`cm run script --tags=push,google-spreadsheet,spreadsheet,push-to-google-spreadsheet [--input_flags]` + +*or* + +`cmr "push google-spreadsheet spreadsheet push-to-google-spreadsheet"` + +`cmr "push google-spreadsheet spreadsheet push-to-google-spreadsheet " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'push,google-spreadsheet,spreadsheet,push-to-google-spreadsheet' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="push,google-spreadsheet,spreadsheet,push-to-google-spreadsheet"``` + +#### Run this script via Docker (beta) + +`cm docker script "push google-spreadsheet spreadsheet push-to-google-spreadsheet" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+<summary>Click here to expand this section.</summary>
+
+* `--csv_file=value` → `CM_CSV_FILE_PATH=value`
+* `--sheet_name=value` → `CM_GOOGLE_SHEET_NAME=value`
+* `--spreadsheet_id=value` → `CM_GOOGLE_SPREADSHEET_ID=value`
+
+**Above CLI flags can be used in the Python CM API as follows:**
+
+```python
+r=cm.access({... , "csv_file":...})
+```
+
+</details>
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GOOGLE_SPREADSHEET_ID: `1gMHjXmFmwZR4-waPPyxy5Pc3VARqX3kKUWxkP97Xa6Y` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-csv-to-spreadsheet/_cm.json)*** + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_google-api-python-client + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_google-auth-oauthlib + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-csv-to-spreadsheet/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-csv-to-spreadsheet/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-csv-to-spreadsheet/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-csv-to-spreadsheet/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-csv-to-spreadsheet/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-csv-to-spreadsheet/_cm.json) + +___ +### Script output +`cmr "push google-spreadsheet spreadsheet push-to-google-spreadsheet " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/set-device-settings-qaic.md b/docs/DevOps-automation/set-device-settings-qaic.md new file mode 100644 index 000000000..2033b7695 --- /dev/null +++ b/docs/DevOps-automation/set-device-settings-qaic.md @@ -0,0 +1,143 @@ +Automatically generated README for this automation recipe: **set-device-settings-qaic** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=set-device-settings-qaic,408a1a1563b44780) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-device-settings-qaic)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *set,device,qaic,ai100,cloud,performance,power,setting,mode,vc,ecc* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "set device qaic ai100 cloud performance power setting mode vc ecc" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=set,device,qaic,ai100,cloud,performance,power,setting,mode,vc,ecc` + +`cm run script --tags=set,device,qaic,ai100,cloud,performance,power,setting,mode,vc,ecc[,variations] ` + +*or* + +`cmr "set device qaic ai100 cloud performance power setting mode vc ecc"` + +`cmr "set device qaic ai100 cloud performance power setting mode vc ecc [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'set,device,qaic,ai100,cloud,performance,power,setting,mode,vc,ecc' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="set,device,qaic,ai100,cloud,performance,power,setting,mode,vc,ecc"``` + +#### Run this script via Docker (beta) + +`cm docker script "set device qaic ai100 cloud performance power setting mode vc ecc[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_ecc` + - Environment variables: + - *CM_QAIC_ECC*: `yes` + - Workflow: + * `_vc.#` + - Environment variables: + - *CM_QAIC_VC*: `#` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_QAIC_DEVICES: `0` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-device-settings-qaic/_cm.json)*** + * detect-os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,qaic,platform,sdk + - CM script: [get-qaic-platform-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-qaic-platform-sdk) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-device-settings-qaic/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-device-settings-qaic/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-device-settings-qaic/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-device-settings-qaic/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-device-settings-qaic/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-device-settings-qaic/_cm.json) + +___ +### Script output +`cmr "set device qaic ai100 cloud performance power setting mode vc ecc [,variations]" -j` +#### New environment keys (filter) + +* `CM_QAIC_DEVICE_*` +#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/set-echo-off-win.md b/docs/DevOps-automation/set-echo-off-win.md new file mode 100644 index 000000000..46e87495c --- /dev/null +++ b/docs/DevOps-automation/set-echo-off-win.md @@ -0,0 +1,116 @@ +Automatically generated README for this automation recipe: **set-echo-off-win** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=set-echo-off-win,49d94b57524f4fcf) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-echo-off-win)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *set,echo,off,win,echo-off-win,echo-off* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "set echo off win echo-off-win echo-off" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=set,echo,off,win,echo-off-win,echo-off` + +`cm run script --tags=set,echo,off,win,echo-off-win,echo-off ` + +*or* + +`cmr "set echo off win echo-off-win echo-off"` + +`cmr "set echo off win echo-off-win echo-off " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'set,echo,off,win,echo-off-win,echo-off' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="set,echo,off,win,echo-off-win,echo-off"``` + +#### Run this script via Docker (beta) + +`cm docker script "set echo off win echo-off-win echo-off" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-echo-off-win/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-echo-off-win/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-echo-off-win/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-echo-off-win/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-echo-off-win/_cm.json) + +___ +### Script output +`cmr "set echo off win echo-off-win echo-off " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/set-performance-mode.md b/docs/DevOps-automation/set-performance-mode.md new file mode 100644 index 000000000..0fd389fe2 --- /dev/null +++ b/docs/DevOps-automation/set-performance-mode.md @@ -0,0 +1,180 @@ +Automatically generated README for this automation recipe: **set-performance-mode** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=set-performance-mode,2c0ab7b64692443d) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM 
tags to find and reuse this script (see in above meta description): *set,system,performance,power,mode* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "set system performance power mode" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=set,system,performance,power,mode` + +`cm run script --tags=set,system,performance,power,mode[,variations] ` + +*or* + +`cmr "set system performance power mode"` + +`cmr "set system performance power mode [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'set,system,performance,power,mode' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="set,system,performance,power,mode"``` + +#### Run this script via Docker (beta) + +`cm docker script "set system performance power mode[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_reproducibility` + - Environment variables: + - *CM_SET_OS_PERFORMANCE_REPRODUCIBILITY_MODE*: `yes` + - Workflow: + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *CM_SET_PERFORMANCE_MODE_OF*: `cpu` + - Workflow: + +
+ + + * Group "**performance-mode**" +
+ Click here to expand this section. + + * **`_performance`** (default) + - Environment variables: + - *CM_SET_PERFORMANCE_MODE*: `performance` + - Workflow: + +
+ + + * Group "**power**" +
+ Click here to expand this section. + + * `_power` + - Environment variables: + - *CM_SET_PERFORMANCE_MODE*: `power` + - Workflow: + +
+ + +#### Default variations + +`_cpu,_performance` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/_cm.json)*** + * detect-os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect-cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/_cm.json) + 1. ***Run native script if exists*** + * [run-ubuntu.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/run-ubuntu.sh) + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/_cm.json) + +___ +### Script output +`cmr "set system performance power mode [,variations]" -j` +#### New environment keys (filter) + +* `OMP_*` +#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/set-sqlite-dir.md b/docs/DevOps-automation/set-sqlite-dir.md new file mode 100644 index 000000000..ec6e14441 --- /dev/null +++ b/docs/DevOps-automation/set-sqlite-dir.md @@ -0,0 +1,141 @@ +Automatically generated README for this automation recipe: **set-sqlite-dir** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=set-sqlite-dir,05904966355a43ac) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-sqlite-dir)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *set,sqlite,dir,sqlite-dir* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "set sqlite dir sqlite-dir" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=set,sqlite,dir,sqlite-dir` + +`cm run script --tags=set,sqlite,dir,sqlite-dir [--input_flags]` + +*or* + +`cmr "set sqlite dir sqlite-dir"` + +`cmr "set sqlite dir sqlite-dir " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'set,sqlite,dir,sqlite-dir' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="set,sqlite,dir,sqlite-dir"``` + +#### Run this script via Docker (beta) + +`cm docker script "set sqlite dir sqlite-dir" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+<summary>Click here to expand this section.</summary>
+
+* `--path=value` → `CM_SQLITE_PATH=value`
+
+**Above CLI flags can be used in the Python CM API as follows:**
+
+```python
+r=cm.access({... , "path":...})
+```
+
+</details>
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-sqlite-dir/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-sqlite-dir/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-sqlite-dir/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-sqlite-dir/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-sqlite-dir/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-sqlite-dir/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-sqlite-dir/_cm.json) + +___ +### Script output +`cmr "set sqlite dir sqlite-dir " [--input_flags] -j` +#### New environment keys (filter) + +* `CM_SQLITE_PATH` +#### New environment keys auto-detected from customize + +* `CM_SQLITE_PATH` \ No newline at end of file diff --git a/docs/DevOps-automation/tar-my-folder.md b/docs/DevOps-automation/tar-my-folder.md new file mode 100644 index 000000000..2b3c6bce5 --- /dev/null +++ b/docs/DevOps-automation/tar-my-folder.md @@ -0,0 +1,133 @@ +Automatically generated README for this automation recipe: **tar-my-folder** + +Category: **DevOps automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=tar-my-folder,3784212e986c456b) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/tar-my-folder)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *run,tar* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run tar" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run,tar` + +`cm run script --tags=run,tar [--input_flags]` + +*or* + +`cmr "run tar"` + +`cmr "run tar " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'run,tar',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,tar"``` + +#### Run this script via Docker (beta) + +`cm docker script "run tar" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--input_dir=value` → `CM_TAR_INPUT_DIR=value` +* `--outfile=value` → `CM_TAR_OUTFILE=value` +* `--output_dir=value` → `CM_TAR_OUTPUT_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "input_dir":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/tar-my-folder/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/tar-my-folder/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/tar-my-folder/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/tar-my-folder/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/tar-my-folder/_cm.json) + +___ +### Script output +`cmr "run tar " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Docker-automation/build-docker-image.md b/docs/Docker-automation/build-docker-image.md new file mode 100644 index 000000000..e9eecd61f --- /dev/null +++ b/docs/Docker-automation/build-docker-image.md @@ -0,0 +1,160 @@ +Automatically generated README for this automation recipe: **build-docker-image** + +Category: **Docker automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=build-docker-image,2c3c4ba2413442e7) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image)* +* CM meta description for this script: 
*[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *build,docker,image,docker-image,dockerimage* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "build docker image docker-image dockerimage" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=build,docker,image,docker-image,dockerimage` + +`cm run script --tags=build,docker,image,docker-image,dockerimage [--input_flags]` + +*or* + +`cmr "build docker image docker-image dockerimage"` + +`cmr "build docker image docker-image dockerimage " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'build,docker,image,docker-image,dockerimage',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="build,docker,image,docker-image,dockerimage"``` + +#### Run this script via Docker (beta) + +`cm docker script "build docker image docker-image dockerimage" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--cache=value` → `CM_DOCKER_CACHE=value` +* `--cm_repo=value` → `CM_MLOPS_REPO=value` +* `--docker_os=value` → `CM_DOCKER_OS=value` +* `--docker_os_version=value` → `CM_DOCKER_OS_VERSION=value` +* `--dockerfile=value` → `CM_DOCKERFILE_WITH_PATH=value` +* `--gh_token=value` → `CM_GH_TOKEN=value` +* `--image_name=value` → `CM_DOCKER_IMAGE_NAME=value` +* `--image_repo=value` → `CM_DOCKER_IMAGE_REPO=value` +* `--image_tag=value` → `CM_DOCKER_IMAGE_TAG=value` +* `--post_run_cmds=value` → `CM_DOCKER_POST_RUN_COMMANDS=value` +* `--pre_run_cmds=value` → `CM_DOCKER_PRE_RUN_COMMANDS=value` +* `--push_image=value` → `CM_DOCKER_PUSH_IMAGE=value` +* `--real_run=value` → `CM_REAL_RUN=value` +* `--script_tags=value` → `CM_DOCKER_RUN_SCRIPT_TAGS=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "cache":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DOCKER_IMAGE_REPO: `local` +* CM_DOCKER_IMAGE_TAG: `latest` + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image/_cm.yaml) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image/_cm.yaml)*** + * build,dockerfile + * Enable this dependency only if all ENV vars are set:
+`{'CM_BUILD_DOCKERFILE': ['yes', '1']}` + - CM script: [build-dockerfile](https://github.com/mlcommons/cm4mlops/tree/master/script/build-dockerfile) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image/_cm.yaml) + +___ +### Script output +`cmr "build docker image docker-image dockerimage " [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DOCKER_*` +#### New environment keys auto-detected from customize + +* `CM_DOCKER_BUILD_ARGS` +* `CM_DOCKER_BUILD_CMD` +* `CM_DOCKER_CACHE_ARG` +* `CM_DOCKER_IMAGE_NAME` +* `CM_DOCKER_IMAGE_REPO` +* `CM_DOCKER_IMAGE_TAG` \ No newline at end of file diff --git a/docs/Docker-automation/build-dockerfile.md b/docs/Docker-automation/build-dockerfile.md new file mode 100644 index 000000000..231cdc2f3 --- /dev/null +++ b/docs/Docker-automation/build-dockerfile.md @@ -0,0 +1,186 @@ +Automatically generated README for this automation recipe: **build-dockerfile** + +Category: **Docker automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=build-dockerfile,e66a7483230d4641) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-dockerfile)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *build,dockerfile* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "build dockerfile" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=build,dockerfile` + +`cm run script --tags=build,dockerfile[,variations] [--input_flags]` + +*or* + +`cmr "build dockerfile"` + +`cmr "build dockerfile [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'build,dockerfile',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="build,dockerfile"``` + +#### Run this script via Docker (beta) + +`cm docker script "build dockerfile[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_slim` + - Environment variables: + - *CM_DOCKER_BUILD_SLIM*: `yes` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--build=value` → `CM_BUILD_DOCKER_IMAGE=value` +* `--cache=value` → `CM_DOCKER_CACHE=value` +* `--cm_repo=value` → `CM_MLOPS_REPO=value` +* `--cm_repo_flags=value` → `CM_DOCKER_ADD_FLAG_TO_CM_MLOPS_REPO=value` +* `--cm_repos=value` → `CM_DOCKER_EXTRA_CM_REPOS=value` +* `--comments=value` → `CM_DOCKER_RUN_COMMENTS=value` +* `--copy_files=value` → `CM_DOCKER_COPY_FILES=value` +* `--docker_base_image=value` → `CM_DOCKER_IMAGE_BASE=value` +* `--docker_os=value` → `CM_DOCKER_OS=value` +* `--docker_os_version=value` → `CM_DOCKER_OS_VERSION=value` +* `--extra_sys_deps=value` → `CM_DOCKER_EXTRA_SYS_DEPS=value` +* `--fake_docker_deps=value` → `CM_DOCKER_FAKE_DEPS=value` +* `--fake_run_option=value` → `CM_DOCKER_FAKE_RUN_OPTION=value` +* `--file_path=value` → `CM_DOCKERFILE_WITH_PATH=value` +* `--gh_token=value` → `CM_GH_TOKEN=value` +* `--image_repo=value` → `CM_DOCKER_IMAGE_REPO=value` +* `--image_tag=value` → `CM_DOCKER_IMAGE_TAG=value` +* `--package_manager_update_cmd=value` → `CM_PACKAGE_MANAGER_UPDATE_CMD=value` +* `--pip_extra_flags=value` → `CM_DOCKER_PIP_INSTALL_EXTRA_FLAGS=value` +* `--post_file=value` → `DOCKER_IMAGE_POST_FILE=value` +* `--post_run_cmds=value` → `CM_DOCKER_POST_RUN_COMMANDS=value` +* `--pre_run_cmds=value` → `CM_DOCKER_PRE_RUN_COMMANDS=value` +* `--push_image=value` → `CM_DOCKER_PUSH_IMAGE=value` +* `--real_run=value` → `CM_REAL_RUN=value` +* `--run_cmd=value` → `CM_DOCKER_RUN_CMD=value` +* `--run_cmd_extra=value` → `CM_DOCKER_RUN_CMD_EXTRA=value` +* `--script_tags=value` → `CM_DOCKER_RUN_SCRIPT_TAGS=value` +* `--skip_cm_sys_upgrade=value` → `CM_DOCKER_SKIP_CM_SYS_UPGRADE=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "build":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DOCKER_BUILD_SLIM: `no` +* CM_DOCKER_IMAGE_EOL: ` +` +* CM_DOCKER_OS: `ubuntu` + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-dockerfile/_cm.yaml) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-dockerfile/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-dockerfile/_cm.yaml) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-dockerfile/_cm.yaml) + 1. Run "postrocess" function from customize.py + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-dockerfile/_cm.yaml)*** + * build,docker,image + * Enable this dependency only if all ENV vars are set:
+`{'CM_BUILD_DOCKER_IMAGE': ['yes', '1']}` + * CM names: `--adr.['build-docker-image']...` + - CM script: [build-docker-image](https://github.com/mlcommons/cm4mlops/tree/master/script/build-docker-image) + +___ +### Script output +`cmr "build dockerfile [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DOCKERFILE_*` +#### New environment keys auto-detected from customize + +* `CM_DOCKERFILE_WITH_PATH` \ No newline at end of file diff --git a/docs/Docker-automation/prune-docker.md b/docs/Docker-automation/prune-docker.md new file mode 100644 index 000000000..513cc894b --- /dev/null +++ b/docs/Docker-automation/prune-docker.md @@ -0,0 +1,118 @@ +Automatically generated README for this automation recipe: **prune-docker** + +Category: **Docker automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=prune-docker,27ead88809bb4d4e) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-docker)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *prune,docker* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "prune docker" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=prune,docker` + +`cm run script --tags=prune,docker ` + +*or* + +`cmr "prune docker"` + +`cmr "prune docker " ` + + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'prune,docker',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="prune,docker"``` + +#### Run this script via Docker (beta) + +`cm docker script "prune docker" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-docker/_cm.json) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-docker/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-docker/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-docker/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-docker/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-docker/_cm.json) + +___ +### Script output +`cmr "prune docker " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Docker-automation/run-docker-container.md b/docs/Docker-automation/run-docker-container.md new file mode 100644 index 000000000..312e03f9f --- /dev/null +++ b/docs/Docker-automation/run-docker-container.md @@ -0,0 +1,166 @@ +Automatically generated README for this automation recipe: **run-docker-container** + +Category: **Docker automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-docker-container,1e0c884107514b46) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-docker-container)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *run,docker,container* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run docker container" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run,docker,container` + +`cm run script --tags=run,docker,container [--input_flags]` + +*or* + +`cmr "run docker container"` + +`cmr "run docker container " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'run,docker,container',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,docker,container"``` + +#### Run this script via Docker (beta) + +`cm docker script "run docker container" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--all_gpus=value` → `CM_DOCKER_ADD_ALL_GPUS=value` +* `--base=value` → `CM_DOCKER_IMAGE_BASE=value` +* `--cache=value` → `CM_DOCKER_CACHE=value` +* `--cm_repo=value` → `CM_MLOPS_REPO=value` +* `--detached=value` → `CM_DOCKER_DETACHED_MODE=value` +* `--device=value` → `CM_DOCKER_ADD_DEVICE=value` +* `--docker_image_base=value` → `CM_DOCKER_IMAGE_BASE=value` +* `--docker_os=value` → `CM_DOCKER_OS=value` +* `--docker_os_version=value` → `CM_DOCKER_OS_VERSION=value` +* `--extra_run_args=value` → `CM_DOCKER_EXTRA_RUN_ARGS=value` +* `--fake_run_option=value` → `CM_DOCKER_FAKE_RUN_OPTION=value` +* `--gh_token=value` → `CM_GH_TOKEN=value` +* `--image_name=value` → `CM_DOCKER_IMAGE_NAME=value` +* `--image_repo=value` → `CM_DOCKER_IMAGE_REPO=value` +* `--image_tag=value` → `CM_DOCKER_IMAGE_TAG=value` +* `--image_tag_extra=value` → `CM_DOCKER_IMAGE_TAG_EXTRA=value` +* `--interactive=value` → `CM_DOCKER_INTERACTIVE_MODE=value` +* `--it=value` → `CM_DOCKER_INTERACTIVE=value` +* `--mounts=value` → `CM_DOCKER_VOLUME_MOUNTS=value` +* `--pass_user_group=value` → `CM_DOCKER_PASS_USER_GROUP=value` +* `--port_maps=value` → `CM_DOCKER_PORT_MAPS=value` +* `--post_run_cmds=value` → `CM_DOCKER_POST_RUN_COMMANDS=value` +* `--pre_run_cmds=value` → `CM_DOCKER_PRE_RUN_COMMANDS=value` +* `--real_run=value` → `CM_REAL_RUN=value` +* `--recreate=value` → `CM_DOCKER_IMAGE_RECREATE=value` +* `--run_cmd=value` → `CM_DOCKER_RUN_CMD=value` +* `--run_cmd_extra=value` → `CM_DOCKER_RUN_CMD_EXTRA=value` +* `--save_script=value` → `CM_DOCKER_SAVE_SCRIPT=value` +* `--script_tags=value` → `CM_DOCKER_RUN_SCRIPT_TAGS=value` +* `--shm_size=value` → `CM_DOCKER_SHM_SIZE=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "all_gpus":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DOCKER_DETACHED_MODE: `yes` + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-docker-container/_cm.yaml) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-docker-container/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-docker-container/_cm.yaml)*** + * build,docker,image + * Skip this dependenecy only if any of ENV vars are set:
+`{'CM_DOCKER_IMAGE_EXISTS': ['yes'], 'CM_DOCKER_SKIP_BUILD': ['yes']}` + * CM names: `--adr.['build-docker-image']...` + - CM script: [build-docker-image](https://github.com/mlcommons/cm4mlops/tree/master/script/build-docker-image) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-docker-container/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-docker-container/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-docker-container/_cm.yaml) + +___ +### Script output +`cmr "run docker container " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/GUI/gui.md b/docs/GUI/gui.md new file mode 100644 index 000000000..c68754f51 --- /dev/null +++ b/docs/GUI/gui.md @@ -0,0 +1,243 @@ +Automatically generated README for this automation recipe: **gui** + +Category: **GUI** + +License: **Apache 2.0** + +Developers: [Grigori Fursin](https://cKnowledge.org/gfursin) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=gui,605cac42514a4c69) ]* + +--- + +This CM script provides a unified GUI to run CM scripts using [Streamlit library](https://streamlit.io). + +If you want to run it in a cloud (Azure, AWS, GCP), you need to open some port and test that you can reach it from outside. 
+ +By default, streamlit uses port 8501 but you can change it as follows: + +```bash +cm run script "cm gui" --port 80 +``` + +If you have troubles accessing this port, use this simple python module to test if your port is open: +```bash +python3 -m http.server 80 +``` + + + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/gui)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *cm,gui,cm-gui,script-gui,cm-script-gui,streamlit* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "cm gui cm-gui script-gui cm-script-gui streamlit" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=cm,gui,cm-gui,script-gui,cm-script-gui,streamlit` + +`cm run script --tags=cm,gui,cm-gui,script-gui,cm-script-gui,streamlit[,variations] [--input_flags]` + +*or* + +`cmr "cm gui cm-gui script-gui cm-script-gui streamlit"` + +`cmr "cm gui cm-gui script-gui cm-script-gui streamlit [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + + +#### Input Flags + +* --**script**=script tags +* --**app**=gui app + 
+**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "script":...} +``` +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'cm,gui,cm-gui,script-gui,cm-script-gui,streamlit',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="cm,gui,cm-gui,script-gui,cm-script-gui,streamlit"``` + +#### Run this script via Docker (beta) + +`cm docker script "cm gui cm-gui script-gui cm-script-gui streamlit[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**app**" +
+ Click here to expand this section. + + * `_chatgpt` + - Environment variables: + - *CM_GUI_APP*: `chatgpt` + - Workflow: + * `_graph` + - Environment variables: + - *CM_GUI_APP*: `graph` + - Workflow: + 1. ***Read "prehook_deps" on other CM scripts*** + * get,generic-python-lib,_matplotlib + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_mpld3 + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_main` + - Environment variables: + - *CM_GUI_APP*: `app` + - Workflow: + * `_playground` + - Environment variables: + - *CM_GUI_APP*: `playground` + - Workflow: + 1. ***Read "prehook_deps" on other CM scripts*** + * get,generic-python-lib,_matplotlib + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_mpld3 + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_streamlit_option_menu + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pandas + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.plotly + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.streamlit-aggrid + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--address=value` → `CM_GUI_ADDRESS=value` +* `--app=value` → `CM_GUI_APP=value` +* `--exp_key_c=value` → `CM_GUI_GRAPH_EXPERIMENT_AXIS_KEY_C=value` +* `--exp_key_s=value` → `CM_GUI_GRAPH_EXPERIMENT_AXIS_KEY_S=value` +* `--exp_key_x=value` → `CM_GUI_GRAPH_EXPERIMENT_AXIS_KEY_X=value` +* `--exp_key_y=value` → `CM_GUI_GRAPH_EXPERIMENT_AXIS_KEY_Y=value` +* `--exp_max_results=value` → `CM_GUI_GRAPH_EXPERIMENT_MAX_RESULTS=value` +* `--exp_name=value` → `CM_GUI_GRAPH_EXPERIMENT_NAME=value` +* `--exp_tags=value` → `CM_GUI_GRAPH_EXPERIMENT_TAGS=value` +* `--exp_title=value` → `CM_GUI_GRAPH_EXPERIMENT_TITLE=value` +* `--exp_uid=value` → `CM_GUI_GRAPH_EXPERIMENT_RESULT_UID=value` +* `--no_browser=value` → `CM_GUI_NO_BROWSER=value` +* `--no_run=value` → `CM_GUI_NO_RUN=value` +* `--port=value` → `CM_GUI_PORT=value` +* `--prefix=value` → `CM_GUI_SCRIPT_PREFIX_LINUX=value` +* `--script=value` → `CM_GUI_SCRIPT_TAGS=value` +* `--title=value` → `CM_GUI_TITLE=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "address":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GUI_EXTRA_CMD: `` +* CM_GUI_SCRIPT_PREFIX_LINUX: `gnome-terminal --` +* CM_GUI_APP: `app` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/gui/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_cmind + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_streamlit + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/gui/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/gui/_cm.yaml) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/gui/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/gui/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/gui/_cm.yaml) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/gui/_cm.yaml) + +___ +### Script output +`cmr "cm gui cm-gui script-gui cm-script-gui streamlit [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Legacy-CK-support/get-ck-repo-mlops.md b/docs/Legacy-CK-support/get-ck-repo-mlops.md new file mode 100644 index 000000000..6977a1460 --- /dev/null +++ b/docs/Legacy-CK-support/get-ck-repo-mlops.md @@ -0,0 +1,120 @@ +Automatically generated README for this automation recipe: **get-ck-repo-mlops** + +Category: **Legacy CK support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ck-repo-mlops,d3a619b8186e4f74) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck-repo-mlops)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ck-repo,mlops,ck-repo-mlops* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ck-repo mlops ck-repo-mlops" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ck-repo,mlops,ck-repo-mlops` + +`cm run script --tags=get,ck-repo,mlops,ck-repo-mlops ` + +*or* + +`cmr "get ck-repo mlops ck-repo-mlops"` + +`cmr "get ck-repo mlops ck-repo-mlops " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,ck-repo,mlops,ck-repo-mlops', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ck-repo,mlops,ck-repo-mlops"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ck-repo mlops ck-repo-mlops" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck-repo-mlops/_cm.json)*** + * get,ck + - CM script: [get-ck](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ck) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck-repo-mlops/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck-repo-mlops/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck-repo-mlops/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck-repo-mlops/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck-repo-mlops/_cm.json) + +___ +### Script output +`cmr "get ck-repo mlops ck-repo-mlops " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Legacy-CK-support/get-ck.md b/docs/Legacy-CK-support/get-ck.md new file mode 100644 index 000000000..29a213ab1 --- /dev/null +++ b/docs/Legacy-CK-support/get-ck.md @@ -0,0 +1,118 @@ +Automatically generated README for this automation recipe: **get-ck** + +Category: **Legacy CK support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ck,5575126797174cac) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ck,ck-framework* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ck ck-framework" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ck,ck-framework` + +`cm run script --tags=get,ck,ck-framework ` + +*or* + +`cmr "get ck ck-framework"` + +`cmr "get ck ck-framework " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,ck,ck-framework', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ck,ck-framework"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ck ck-framework" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck/_cm.json) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck/_cm.json) + +___ +### Script output +`cmr "get ck ck-framework " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/add-custom-nvidia-system.md b/docs/MLPerf-benchmark-support/add-custom-nvidia-system.md new file mode 100644 index 000000000..64bf4f9e6 --- /dev/null +++ b/docs/MLPerf-benchmark-support/add-custom-nvidia-system.md @@ -0,0 +1,175 @@ +Automatically generated README for this automation recipe: **add-custom-nvidia-system** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=add-custom-nvidia-system,b2e6c46c6e8745a3) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/add-custom-nvidia-system)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *add,custom,system,nvidia* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "add custom system nvidia" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=add,custom,system,nvidia` + +`cm run script --tags=add,custom,system,nvidia[,variations] ` + +*or* + +`cmr "add custom system nvidia"` + +`cmr "add custom system nvidia [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'add,custom,system,nvidia', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="add,custom,system,nvidia"``` + +#### Run this script via Docker (beta) + +`cm docker script "add custom system nvidia[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**code**" +
+ Click here to expand this section. + + * `_ctuning` + - Workflow: + * `_custom` + - Workflow: + * `_mlcommons` + - Workflow: + * `_nvidia-only` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +* `r2.1` +* `r3.0` +* `r3.1` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/add-custom-nvidia-system/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,cuda,_cudnn + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,tensorrt + - CM script: [get-tensorrt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tensorrt) + * get,cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * get,generic-python-lib,_requests + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic,sys-util,_glog-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_gflags-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_libre2-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_libnuma-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_libboost-all-dev + - CM script: 
[get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_rapidjson-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,nvidia,mlperf,inference,common-code + * CM names: `--adr.['nvidia-inference-common-code']...` + - CM script: [get-mlperf-inference-nvidia-common-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-common-code) + * get,generic-python-lib,_pycuda + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/add-custom-nvidia-system/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/add-custom-nvidia-system/_cm.yaml) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/add-custom-nvidia-system/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/add-custom-nvidia-system/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/add-custom-nvidia-system/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/add-custom-nvidia-system/_cm.yaml) + +___ +### Script output +`cmr "add custom system nvidia [,variations]" -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/benchmark-any-mlperf-inference-implementation.md b/docs/MLPerf-benchmark-support/benchmark-any-mlperf-inference-implementation.md new file mode 100644 index 000000000..d84308de4 --- /dev/null +++ b/docs/MLPerf-benchmark-support/benchmark-any-mlperf-inference-implementation.md @@ -0,0 +1,268 @@ +Automatically generated README for this automation recipe: **benchmark-any-mlperf-inference-implementation** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=benchmark-any-mlperf-inference-implementation,8d3cd46f54464810) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-any-mlperf-inference-implementation)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *benchmark,run,natively,all,inference,any,mlperf,mlperf-implementation,implementation,mlperf-models* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "benchmark run natively all inference any mlperf mlperf-implementation implementation mlperf-models" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=benchmark,run,natively,all,inference,any,mlperf,mlperf-implementation,implementation,mlperf-models` + +`cm run script --tags=benchmark,run,natively,all,inference,any,mlperf,mlperf-implementation,implementation,mlperf-models[,variations] [--input_flags]` + +*or* + +`cmr "benchmark run natively all inference any mlperf mlperf-implementation implementation mlperf-models"` + +`cmr "benchmark run natively all inference any mlperf mlperf-implementation implementation mlperf-models [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'benchmark,run,natively,all,inference,any,mlperf,mlperf-implementation,implementation,mlperf-models', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="benchmark,run,natively,all,inference,any,mlperf,mlperf-implementation,implementation,mlperf-models"``` + +#### Run this script via Docker (beta) + +`cm docker script "benchmark run natively all inference any mlperf mlperf-implementation implementation mlperf-models[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_aws-dl2q.24xlarge,qualcomm` + - Workflow: + * `_mini,power` + - Workflow: + * `_orin,power` + - Workflow: + * `_phoenix,nvidia` + - Workflow: + * `_phoenix,power` + - Workflow: + * `_phoenix,reference` + - Workflow: + * `_rb6,power` + - Workflow: + * `_rb6,qualcomm` + - Workflow: + * `_rpi4,power` + - Workflow: + * `_sapphire-rapids.24c,nvidia` + - Workflow: + +
+ + + * Group "**implementation**" +
+ Click here to expand this section. + + * `_deepsparse` + - Environment variables: + - *DIVISION*: `open` + - *IMPLEMENTATION*: `deepsparse` + - Workflow: + * `_intel` + - Environment variables: + - *IMPLEMENTATION*: `intel` + - Workflow: + * `_mil` + - Environment variables: + - *IMPLEMENTATION*: `mil` + - Workflow: + * `_nvidia` + - Environment variables: + - *IMPLEMENTATION*: `nvidia-original` + - Workflow: + * `_qualcomm` + - Environment variables: + - *IMPLEMENTATION*: `qualcomm` + - Workflow: + * `_reference` + - Environment variables: + - *IMPLEMENTATION*: `reference` + - Workflow: + * `_tflite-cpp` + - Environment variables: + - *IMPLEMENTATION*: `tflite_cpp` + - Workflow: + +
+ + + * Group "**power**" +
+ Click here to expand this section. + + * **`_performance-only`** (default) + - Workflow: + * `_power` + - Environment variables: + - *POWER*: `True` + - Workflow: + +
+ + + * Group "**sut**" +
+ Click here to expand this section. + + * `_aws-dl2q.24xlarge` + - Workflow: + * `_macbookpro-m1` + - Environment variables: + - *CATEGORY*: `edge` + - *DIVISION*: `closed` + - Workflow: + * `_mini` + - Workflow: + * `_orin` + - Workflow: + * `_orin.32g` + - Environment variables: + - *CATEGORY*: `edge` + - *DIVISION*: `closed` + - Workflow: + * `_phoenix` + - Environment variables: + - *CATEGORY*: `edge` + - *DIVISION*: `closed` + - Workflow: + * `_rb6` + - Workflow: + * `_rpi4` + - Workflow: + * `_sapphire-rapids.24c` + - Environment variables: + - *CATEGORY*: `edge` + - *DIVISION*: `closed` + - Workflow: + +
+ + +#### Default variations + +`_performance-only` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--backends=value` → `BACKENDS=value` +* `--category=value` → `CATEGORY=value` +* `--devices=value` → `DEVICES=value` +* `--division=value` → `DIVISION=value` +* `--extra_args=value` → `EXTRA_ARGS=value` +* `--models=value` → `MODELS=value` +* `--power_server=value` → `POWER_SERVER=value` +* `--power_server_port=value` → `POWER_SERVER_PORT=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "backends":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* DIVISION: `open` +* CATEGORY: `edge` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-any-mlperf-inference-implementation/_cm.yaml)*** + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-any-mlperf-inference-implementation/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-any-mlperf-inference-implementation/_cm.yaml) + 1. ***Run native script if exists*** + * [run-template.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-any-mlperf-inference-implementation/run-template.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-any-mlperf-inference-implementation/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-any-mlperf-inference-implementation/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-any-mlperf-inference-implementation/_cm.yaml) + +___ +### Script output +`cmr "benchmark run natively all inference any mlperf mlperf-implementation implementation mlperf-models [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/build-mlperf-inference-server-nvidia.md b/docs/MLPerf-benchmark-support/build-mlperf-inference-server-nvidia.md new file mode 100644 index 000000000..d97de8054 --- /dev/null +++ b/docs/MLPerf-benchmark-support/build-mlperf-inference-server-nvidia.md @@ -0,0 +1,248 @@ +Automatically generated README for this automation recipe: **build-mlperf-inference-server-nvidia** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=build-mlperf-inference-server-nvidia,f37403af5e9f4541) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-mlperf-inference-server-nvidia)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *build,mlcommons,mlperf,inference,inference-server,server,nvidia-harness,nvidia* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "build mlcommons mlperf inference inference-server server nvidia-harness nvidia" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=build,mlcommons,mlperf,inference,inference-server,server,nvidia-harness,nvidia` + +`cm run script --tags=build,mlcommons,mlperf,inference,inference-server,server,nvidia-harness,nvidia[,variations] [--input_flags]` + +*or* + +`cmr "build mlcommons mlperf inference inference-server server nvidia-harness nvidia"` + +`cmr "build mlcommons mlperf inference inference-server server nvidia-harness nvidia [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'build,mlcommons,mlperf,inference,inference-server,server,nvidia-harness,nvidia', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="build,mlcommons,mlperf,inference,inference-server,server,nvidia-harness,nvidia"``` + +#### Run this script via Docker (beta) + +`cm docker script "build mlcommons mlperf inference inference-server server nvidia-harness nvidia[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**code**" +
+ Click here to expand this section. + + * **`_ctuning`** (default) + - Workflow: + * `_custom` + - Workflow: + * `_mlcommons` + - Workflow: + * `_nvidia-only` + - Workflow: + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * `_cpu` + - Environment variables: + - *CM_MLPERF_DEVICE*: `cpu` + - Workflow: + * **`_cuda`** (default) + - Environment variables: + - *CM_MLPERF_DEVICE*: `cuda` + - *CM_MLPERF_DEVICE_LIB_NAMESPEC*: `cudart` + - Workflow: + * `_inferentia` + - Environment variables: + - *CM_MLPERF_DEVICE*: `inferentia` + - Workflow: + +
+ + +#### Default variations + +`_ctuning,_cuda` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--clean=value` → `CM_MAKE_CLEAN=value` +* `--custom_system=value` → `CM_CUSTOM_SYSTEM_NVIDIA=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "clean":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MAKE_BUILD_COMMAND: `build` +* CM_MAKE_CLEAN: `no` +* CM_CUSTOM_SYSTEM_NVIDIA: `yes` + +
+ +#### Versions +Default version: `r3.1` + +* `r2.1` +* `r3.0` +* `r3.1` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-mlperf-inference-server-nvidia/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,cuda,_cudnn + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_DEVICE': ['cuda', 'inferentia']}` + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,tensorrt,_dev + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_DEVICE': ['cuda', 'inferentia']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_TENSORRT_SYSTEM_DETECT': [True]}` + * CM names: `--adr.['tensorrt']...` + - CM script: [get-tensorrt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tensorrt) + * get,gcc + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + * get,cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * get,generic,sys-util,_glog-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_gflags-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_libgmock-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_libre2-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_libnuma-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_libboost-all-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_rapidjson-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,nvidia,mlperf,inference,common-code + * CM names: `--adr.['nvidia-inference-common-code']...` + - CM script: [get-mlperf-inference-nvidia-common-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-common-code) + * get,generic-python-lib,_package.pybind11 + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pycuda + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_RUN_STATE_DOCKER': ['yes', True, 'True']}` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_opencv-python + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_nvidia-dali + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,mlperf,inference,nvidia,scratch,space + * CM names: `--adr.['nvidia-scratch-space']...` + - CM script: [get-mlperf-inference-nvidia-scratch-space](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-scratch-space) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-mlperf-inference-server-nvidia/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-mlperf-inference-server-nvidia/_cm.yaml) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-mlperf-inference-server-nvidia/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-mlperf-inference-server-nvidia/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-mlperf-inference-server-nvidia/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-mlperf-inference-server-nvidia/_cm.yaml)*** + * add,custom,system,nvidia + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_CUSTOM_SYSTEM_NVIDIA': ['no', False, 'False']}` + * CM names: `--adr.['custom-system-nvidia', 'nvidia-inference-common-code']...` + - CM script: [add-custom-nvidia-system](https://github.com/mlcommons/cm4mlops/tree/master/script/add-custom-nvidia-system) + +___ +### Script output +`cmr "build mlcommons mlperf inference inference-server server nvidia-harness nvidia [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_MLPERF_INFERENCE_NVIDIA_CODE_PATH` +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/generate-mlperf-inference-submission.md b/docs/MLPerf-benchmark-support/generate-mlperf-inference-submission.md new file mode 100644 index 000000000..e3a085fab --- /dev/null +++ b/docs/MLPerf-benchmark-support/generate-mlperf-inference-submission.md @@ -0,0 +1,191 @@ +Automatically generated README for this automation recipe: **generate-mlperf-inference-submission** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=generate-mlperf-inference-submission,5f8ab2d0b5874d53) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-submission)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *generate,submission,mlperf,mlperf-inference,inference,mlcommons,inference-submission,mlperf-inference-submission,mlcommons-inference-submission* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "generate submission mlperf mlperf-inference inference mlcommons inference-submission mlperf-inference-submission mlcommons-inference-submission" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=generate,submission,mlperf,mlperf-inference,inference,mlcommons,inference-submission,mlperf-inference-submission,mlcommons-inference-submission` + +`cm run script --tags=generate,submission,mlperf,mlperf-inference,inference,mlcommons,inference-submission,mlperf-inference-submission,mlcommons-inference-submission [--input_flags]` + +*or* + +`cmr "generate submission mlperf mlperf-inference inference mlcommons inference-submission mlperf-inference-submission mlcommons-inference-submission"` + +`cmr "generate submission mlperf mlperf-inference inference mlcommons inference-submission mlperf-inference-submission mlcommons-inference-submission " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'generate,submission,mlperf,mlperf-inference,inference,mlcommons,inference-submission,mlperf-inference-submission,mlcommons-inference-submission', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="generate,submission,mlperf,mlperf-inference,inference,mlcommons,inference-submission,mlperf-inference-submission,mlcommons-inference-submission"``` + +#### Run this script via Docker (beta) + +`cm docker script "generate submission mlperf mlperf-inference inference mlcommons inference-submission mlperf-inference-submission mlcommons-inference-submission" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--analyzer_settings_file=value` → `CM_MLPERF_POWER_ANALYZER_SETTINGS_FILE_PATH=value` +* `--category=value` → `CM_MLPERF_SUBMISSION_CATEGORY=value` +* `--clean=value` → `CM_MLPERF_CLEAN_SUBMISSION_DIR=value` +* `--dashboard=value` → `CM_MLPERF_DASHBOARD=value` +* `--dashboard_wb_project=value` → `CM_MLPERF_DASHBOARD_WANDB_PROJECT=value` +* `--device=value` → `CM_MLPERF_DEVICE=value` +* `--division=value` → `CM_MLPERF_SUBMISSION_DIVISION=value` +* `--duplicate=value` → `CM_MLPERF_DUPLICATE_SCENARIO_RESULTS=value` +* `--hw_name=value` → `CM_HW_NAME=value` +* `--hw_notes_extra=value` → `CM_MLPERF_SUT_HW_NOTES_EXTRA=value` +* `--infer_scenario_results=value` → `CM_MLPERF_DUPLICATE_SCENARIO_RESULTS=value` +* `--power_settings_file=value` → `CM_MLPERF_POWER_SETTINGS_FILE_PATH=value` +* `--preprocess=value` → `CM_RUN_MLPERF_SUBMISSION_PREPROCESSOR=value` +* `--preprocess_submission=value` → `CM_RUN_MLPERF_SUBMISSION_PREPROCESSOR=value` +* `--results_dir=value` → `CM_MLPERF_INFERENCE_RESULTS_DIR_=value` +* `--run_checker=value` → `CM_RUN_SUBMISSION_CHECKER=value` +* `--run_style=value` → `CM_MLPERF_RUN_STYLE=value` +* `--skip_truncation=value` → `CM_SKIP_TRUNCATE_ACCURACY=value` +* `--submission_dir=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` +* `--submitter=value` → `CM_MLPERF_SUBMITTER=value` +* `--sw_notes_extra=value` → `CM_MLPERF_SUT_SW_NOTES_EXTRA=value` +* `--tar=value` → `CM_TAR_SUBMISSION_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "analyzer_settings_file":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_RUN_MLPERF_ACCURACY: `on` +* CM_MLPERF_RUN_STYLE: `valid` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-submission/_cm.json)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,sut,system-description + - CM script: [get-mlperf-inference-sut-description](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-sut-description) + * install,pip-package,for-cmind-python,_package.tabulate + - CM script: [install-pip-package-for-cmind-python](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pip-package-for-cmind-python) + * get,mlperf,inference,utils + - CM script: [get-mlperf-inference-utils](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-utils) + * get,mlperf,results,dir + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_MLPERF_INFERENCE_RESULTS_DIR_': ['on']}` + * CM names: `--adr.['get-mlperf-results-dir']...` + - CM script: [get-mlperf-inference-results-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results-dir) + * get,mlperf,submission,dir + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_INFERENCE_SUBMISSION_DIR': ['on']}` + * CM names: `--adr.['get-mlperf-submission-dir']...` + - CM script: [get-mlperf-inference-submission-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-submission-dir) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-submission/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-submission/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-submission/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-submission/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-submission/_cm.json)*** + * accuracy,truncate,mlc + * Enable this dependency only if all ENV vars are set:
+`{'CM_RUN_MLPERF_ACCURACY': ['on']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_SKIP_TRUNCATE_ACCURACY': ['yes']}` + - CM script: [truncate-mlperf-inference-accuracy-log](https://github.com/mlcommons/cm4mlops/tree/master/script/truncate-mlperf-inference-accuracy-log) + * preprocess,mlperf,submission + * Enable this dependency only if all ENV vars are set:
+`{'CM_RUN_MLPERF_SUBMISSION_PREPROCESSOR': ['on', 'True', 'yes', True]}` + - CM script: [preprocess-mlperf-inference-submission](https://github.com/mlcommons/cm4mlops/tree/master/script/preprocess-mlperf-inference-submission) + * submission,inference,checker,mlc + * Enable this dependency only if all ENV vars are set:
+`{'CM_RUN_SUBMISSION_CHECKER': ['yes']}` + * CM names: `--adr.['mlperf-inference-submission-checker', 'submission-checker']...` + - CM script: [run-mlperf-inference-submission-checker](https://github.com/mlcommons/cm4mlops/tree/master/script/run-mlperf-inference-submission-checker) + +___ +### Script output +`cmr "generate submission mlperf mlperf-inference inference mlcommons inference-submission mlperf-inference-submission mlcommons-inference-submission " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/generate-mlperf-inference-user-conf.md b/docs/MLPerf-benchmark-support/generate-mlperf-inference-user-conf.md new file mode 100644 index 000000000..f248f0b50 --- /dev/null +++ b/docs/MLPerf-benchmark-support/generate-mlperf-inference-user-conf.md @@ -0,0 +1,199 @@ +Automatically generated README for this automation recipe: **generate-mlperf-inference-user-conf** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Developers: [Arjun Suresh](https://www.linkedin.com/in/arjunsuresh), [Thomas Zhu](https://www.linkedin.com/in/hanwen-zhu-483614189), [Grigori Fursin](https://cKnowledge.org/gfursin) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=generate-mlperf-inference-user-conf,3af4475745964b93) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-user-conf)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *generate,mlperf,inference,user-conf,inference-user-conf* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "generate mlperf inference user-conf inference-user-conf" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=generate,mlperf,inference,user-conf,inference-user-conf` + +`cm run script --tags=generate,mlperf,inference,user-conf,inference-user-conf [--input_flags]` + +*or* + +`cmr "generate mlperf inference user-conf inference-user-conf"` + +`cmr "generate mlperf inference user-conf inference-user-conf " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'generate,mlperf,inference,user-conf,inference-user-conf', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="generate,mlperf,inference,user-conf,inference-user-conf"``` + +#### Run this script via Docker (beta) + +`cm docker script "generate mlperf inference user-conf inference-user-conf" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--count=value` → `CM_MLPERF_LOADGEN_QUERY_COUNT=value` +* `--hw_name=value` → `CM_HW_NAME=value` +* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` +* `--multistream_target_latency=value` → `CM_MLPERF_LOADGEN_MULTISTREAM_TARGET_LATENCY=value` +* `--num_threads=value` → `CM_NUM_THREADS=value` +* `--offline_target_qps=value` → `CM_MLPERF_LOADGEN_OFFLINE_TARGET_QPS=value` +* `--output_dir=value` → `OUTPUT_BASE_DIR=value` +* `--performance_sample_count=value` → `CM_MLPERF_PERFORMANCE_SAMPLE_COUNT=value` +* `--power=value` → `CM_MLPERF_POWER=value` +* `--regenerate_files=value` → `CM_REGENERATE_MEASURE_FILES=value` +* `--rerun=value` → `CM_RERUN=value` +* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` +* `--server_target_qps=value` → `CM_MLPERF_LOADGEN_SERVER_TARGET_QPS=value` +* `--singlestream_target_latency=value` → `CM_MLPERF_LOADGEN_SINGLESTREAM_TARGET_LATENCY=value` +* `--target_latency=value` → `CM_MLPERF_LOADGEN_TARGET_LATENCY=value` +* `--target_qps=value` → `CM_MLPERF_LOADGEN_TARGET_QPS=value` +* `--test_query_count=value` → `CM_TEST_QUERY_COUNT=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "count":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_LOADGEN_MODE: `accuracy` +* CM_MLPERF_LOADGEN_SCENARIO: `Offline` +* CM_OUTPUT_FOLDER_NAME: `test_results` +* CM_MLPERF_RUN_STYLE: `test` +* CM_TEST_QUERY_COUNT: `10` +* CM_FAST_FACTOR: `5` +* CM_MLPERF_QUANTIZATION: `False` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-user-conf/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,python + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlperf,results,dir + * Skip this dependency only if all ENV vars are set:
+`{'OUTPUT_BASE_DIR': ['on']}` + * CM names: `--adr.['get-mlperf-results-dir']...` + - CM script: [get-mlperf-inference-results-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results-dir) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,sut,configs + - CM script: [get-mlperf-inference-sut-configs](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-sut-configs) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-user-conf/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-user-conf/_cm.yaml) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-user-conf/_cm.yaml) + 1. Run "postprocess" function from customize.py + 1.
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-user-conf/_cm.yaml) + +___ +### Script output +`cmr "generate mlperf inference user-conf inference-user-conf " [--input_flags] -j` +#### New environment keys (filter) + +* `CM_HW_*` +* `CM_LOGS_DIR` +* `CM_MAX_EXAMPLES` +* `CM_MLPERF_*` +* `CM_SUT_*` +#### New environment keys auto-detected from customize + +* `CM_LOGS_DIR` +* `CM_MAX_EXAMPLES` +* `CM_MLPERF_ACCURACY_RESULTS_DIR` +* `CM_MLPERF_COMPLIANCE_RUN_POSTPONED` +* `CM_MLPERF_CONF` +* `CM_MLPERF_INFERENCE_AUDIT_PATH` +* `CM_MLPERF_INFERENCE_FINAL_RESULTS_DIR` +* `CM_MLPERF_INFERENCE_MIN_DURATION` +* `CM_MLPERF_LOADGEN_LOGS_DIR` +* `CM_MLPERF_LOADGEN_MODE` +* `CM_MLPERF_LOADGEN_QUERY_COUNT` +* `CM_MLPERF_LOADGEN_SCENARIO` +* `CM_MLPERF_LOADGEN_TARGET_LATENCY` +* `CM_MLPERF_LOADGEN_TARGET_QPS` +* `CM_MLPERF_OUTPUT_DIR` +* `CM_MLPERF_POWER_LOG_DIR` +* `CM_MLPERF_RANGING_USER_CONF` +* `CM_MLPERF_RUN_STYLE` +* `CM_MLPERF_SKIP_RUN` +* `CM_MLPERF_TESTING_USER_CONF` +* `CM_MLPERF_USER_CONF` +* `CM_MLPERF_USE_MAX_DURATION` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/generate-mlperf-tiny-report.md b/docs/MLPerf-benchmark-support/generate-mlperf-tiny-report.md new file mode 100644 index 000000000..c2df0a411 --- /dev/null +++ b/docs/MLPerf-benchmark-support/generate-mlperf-tiny-report.md @@ -0,0 +1,145 @@ +Automatically generated README for this automation recipe: **generate-mlperf-tiny-report** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Developers: [Grigori Fursin](https://cKnowledge.org/gfursin) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=generate-mlperf-tiny-report,709c3f3f9b3e4783) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *generate,mlperf,tiny,mlperf-tiny,report* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "generate mlperf tiny mlperf-tiny report" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=generate,mlperf,tiny,mlperf-tiny,report` + +`cm run script --tags=generate,mlperf,tiny,mlperf-tiny,report [--input_flags]` + +*or* + +`cmr "generate mlperf tiny mlperf-tiny report"` + +`cmr "generate mlperf tiny mlperf-tiny report " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'generate,mlperf,tiny,mlperf-tiny,report', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="generate,mlperf,tiny,mlperf-tiny,report"``` + +#### Run this script via Docker (beta) + +`cm docker script "generate mlperf tiny mlperf-tiny report" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--repo_tags=value` → `CM_IMPORT_TINYMLPERF_REPO_TAGS=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "repo_tags":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_IMPORT_TINYMLPERF_REPO_TAGS: `1.1-private` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_xlsxwriter + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pandas + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report/_cm.yaml) + 1. ***Run native script if exists*** + * [run_submission_checker.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report/run_submission_checker.bat) + * [run_submission_checker.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report/run_submission_checker.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report/_cm.yaml) + +___ +### Script output +`cmr "generate mlperf tiny mlperf-tiny report " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/generate-mlperf-tiny-submission.md b/docs/MLPerf-benchmark-support/generate-mlperf-tiny-submission.md new file mode 100644 index 000000000..8e6de9427 --- /dev/null +++ b/docs/MLPerf-benchmark-support/generate-mlperf-tiny-submission.md @@ -0,0 +1,414 @@ +Automatically generated README for this automation recipe: **generate-mlperf-tiny-submission** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=generate-mlperf-tiny-submission,04289b9fc07b42b6) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-submission)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *generate,submission,mlperf,mlperf-tiny,tiny,mlcommons,tiny-submission,mlperf-tiny-submission,mlcommons-tiny-submission* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "generate submission mlperf mlperf-tiny tiny mlcommons tiny-submission mlperf-tiny-submission mlcommons-tiny-submission" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=generate,submission,mlperf,mlperf-tiny,tiny,mlcommons,tiny-submission,mlperf-tiny-submission,mlcommons-tiny-submission` + +`cm run script --tags=generate,submission,mlperf,mlperf-tiny,tiny,mlcommons,tiny-submission,mlperf-tiny-submission,mlcommons-tiny-submission ` + +*or* + +`cmr "generate submission mlperf mlperf-tiny tiny mlcommons tiny-submission mlperf-tiny-submission mlcommons-tiny-submission"` + +`cmr "generate submission mlperf mlperf-tiny tiny mlcommons tiny-submission mlperf-tiny-submission mlcommons-tiny-submission " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'generate,submission,mlperf,mlperf-tiny,tiny,mlcommons,tiny-submission,mlperf-tiny-submission,mlcommons-tiny-submission', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="generate,submission,mlperf,mlperf-tiny,tiny,mlcommons,tiny-submission,mlperf-tiny-submission,mlcommons-tiny-submission"``` + +#### Run this script via Docker (beta) + +`cm docker script "generate submission mlperf mlperf-tiny tiny mlcommons tiny-submission mlperf-tiny-submission mlcommons-tiny-submission" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-submission/_cm.json)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,sut,system-description + - CM script: [get-mlperf-inference-sut-description](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-sut-description) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-submission/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-submission/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-submission/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-submission/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-submission/_cm.json)*** + * + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_RUN_STYLE': ['valid']}` + - CM script: [outdated-test-abtf-ssd-pytorch](outdated-test-abtf-ssd-pytorch) + - CM script: [run-how-to-run-server](https://github.com/how-to-run/server/tree/master/script/run-how-to-run-server) + - CM script: [get-dataset-cognata-mlcommons](https://github.com/mlcommons/cm4abtf/tree/master/script/get-dataset-cognata-mlcommons) + - CM script: [get-ml-model-abtf-ssd-pytorch](https://github.com/mlcommons/cm4abtf/tree/master/script/get-ml-model-abtf-ssd-pytorch) + - CM script: [test-ssd-resnet50-cognata-pytorch](https://github.com/mlcommons/cm4abtf/tree/master/script/test-ssd-resnet50-cognata-pytorch) + - CM script: [reproduce-ieee-acm-micro2023-paper-22](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-22) + - CM script: [reproduce-ieee-acm-micro2023-paper-28](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-28) + - CM script: [reproduce-ieee-acm-micro2023-paper-33](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-33) + - CM script: [reproduce-ieee-acm-micro2023-paper-38](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-38) + - CM script: [reproduce-ieee-acm-micro2023-paper-5](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-5) + - CM script: [reproduce-ieee-acm-micro2023-paper-8](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-8) + - CM script: [reproduce-ieee-acm-micro2023-paper-85](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-85) + - CM script: [reproduce-ieee-acm-micro2023-paper-87](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-87) + - CM script: 
[reproduce-ieee-acm-micro2023-paper-96](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-96) + - CM script: [reproduce-ipol-paper-2022-439a](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ipol-paper-2022-439a) + - CM script: [reproduce-neurips-paper-2022-arxiv-2204.09656](https://github.com/ctuning/cm4research/tree/master/script/reproduce-neurips-paper-2022-arxiv-2204.09656) + - CM script: [process-mlperf-inference-results](https://github.com/mlcommons/cm4mlperf-results/tree/master/script/process-mlperf-inference-results) + - CM script: [get-target-device](https://github.com/cknowledge/cm4research/tree/master/script/get-target-device) + - CM script: [run-refiners-hello-world](https://github.com/cknowledge/cm4research/tree/master/script/run-refiners-hello-world) + - CM script: [test-mlperf-loadgen-cpp](https://github.com/cknowledge/cm4research/tree/master/script/test-mlperf-loadgen-cpp) + - CM script: [test-mlperf-loadgen-py](https://github.com/cknowledge/cm4research/tree/master/script/test-mlperf-loadgen-py) + - CM script: [test-onnxruntime-cpp](https://github.com/cknowledge/cm4research/tree/master/script/test-onnxruntime-cpp) + - CM script: [app-generate-image-dalle-mini-jax-py](https://github.com/cknowledge/cm4research-private/tree/master/script/app-generate-image-dalle-mini-jax-py) + - CM script: [app-generate-image-stable-diffusion2-pytorch-cuda-py](https://github.com/cknowledge/cm4research-private/tree/master/script/app-generate-image-stable-diffusion2-pytorch-cuda-py) + - CM script: [app-image-classification-onnx-py-ck](https://github.com/cknowledge/cm4research-private/tree/master/script/app-image-classification-onnx-py-ck) + - CM script: [app-image-corner-detection-old](https://github.com/cknowledge/cm4research-private/tree/master/script/app-image-corner-detection-old) + - CM script: 
[app-mlperf-inference-nvidia](https://github.com/cknowledge/cm4research-private/tree/master/script/app-mlperf-inference-nvidia) + - CM script: [app-stable-diffusion-pytorch-cuda-py](https://github.com/cknowledge/cm4research-private/tree/master/script/app-stable-diffusion-pytorch-cuda-py) + - CM script: [get-axs](https://github.com/cknowledge/cm4research-private/tree/master/script/get-axs) + - CM script: [gui-llm](https://github.com/cknowledge/cm4research-private/tree/master/script/gui-llm) + - CM script: [not-needed--get-android-cmdline-tools](https://github.com/cknowledge/cm4research-private/tree/master/script/not-needed--get-android-cmdline-tools) + - CM script: [not-needed--install-android-cmdline-tools](https://github.com/cknowledge/cm4research-private/tree/master/script/not-needed--install-android-cmdline-tools) + - CM script: [test-script1](https://github.com/cknowledge/cm4research-private/tree/master/script/test-script1) + - CM script: [test-script2](https://github.com/cknowledge/cm4research-private/tree/master/script/test-script2) + - CM script: [test-script3](https://github.com/cknowledge/cm4research-private/tree/master/script/test-script3) + - CM script: [test-script4](https://github.com/cknowledge/cm4research-private/tree/master/script/test-script4) + - CM script: [test-script5](https://github.com/cknowledge/cm4research-private/tree/master/script/test-script5) + - CM script: [activate-python-venv](https://github.com/mlcommons/cm4mlops/tree/master/script/activate-python-venv) + - CM script: [add-custom-nvidia-system](https://github.com/mlcommons/cm4mlops/tree/master/script/add-custom-nvidia-system) + - CM script: [app-image-classification-onnx-py](https://github.com/mlcommons/cm4mlops/tree/master/script/app-image-classification-onnx-py) + - CM script: [app-image-classification-tf-onnx-cpp](https://github.com/mlcommons/cm4mlops/tree/master/script/app-image-classification-tf-onnx-cpp) + - CM script: 
[app-image-classification-torch-py](https://github.com/mlcommons/cm4mlops/tree/master/script/app-image-classification-torch-py) + - CM script: [app-image-classification-tvm-onnx-py](https://github.com/mlcommons/cm4mlops/tree/master/script/app-image-classification-tvm-onnx-py) + - CM script: [app-image-corner-detection](https://github.com/mlcommons/cm4mlops/tree/master/script/app-image-corner-detection) + - CM script: [app-loadgen-generic-python](https://github.com/mlcommons/cm4mlops/tree/master/script/app-loadgen-generic-python) + - CM script: [app-mlperf-inference](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference) + - CM script: [app-mlperf-inference-ctuning-cpp-tflite](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-ctuning-cpp-tflite) + - CM script: [app-mlperf-inference-dummy](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-dummy) + - CM script: [app-mlperf-inference-intel](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-intel) + - CM script: [app-mlperf-inference-mlcommons-cpp](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-mlcommons-cpp) + - CM script: [app-mlperf-inference-mlcommons-python](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-mlcommons-python) + - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) + - CM script: [app-mlperf-inference-qualcomm](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-qualcomm) + - CM script: [app-mlperf-training-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-training-nvidia) + - CM script: [app-mlperf-training-reference](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-training-reference) + - CM script: 
[app-stable-diffusion-onnx-py](https://github.com/mlcommons/cm4mlops/tree/master/script/app-stable-diffusion-onnx-py) + - CM script: [benchmark-any-mlperf-inference-implementation](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-any-mlperf-inference-implementation) + - CM script: [benchmark-program](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program) + - CM script: [benchmark-program-mlperf](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program-mlperf) + - CM script: [build-docker-image](https://github.com/mlcommons/cm4mlops/tree/master/script/build-docker-image) + - CM script: [build-dockerfile](https://github.com/mlcommons/cm4mlops/tree/master/script/build-dockerfile) + - CM script: [build-mlperf-inference-server-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/build-mlperf-inference-server-nvidia) + - CM script: [calibrate-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/calibrate-model-for.qaic) + - CM script: [compile-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-model-for.qaic) + - CM script: [compile-program](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-program) + - CM script: [convert-csv-to-md](https://github.com/mlcommons/cm4mlops/tree/master/script/convert-csv-to-md) + - CM script: [convert-ml-model-huggingface-to-onnx](https://github.com/mlcommons/cm4mlops/tree/master/script/convert-ml-model-huggingface-to-onnx) + - CM script: [copy-to-clipboard](https://github.com/mlcommons/cm4mlops/tree/master/script/copy-to-clipboard) + - CM script: [create-conda-env](https://github.com/mlcommons/cm4mlops/tree/master/script/create-conda-env) + - CM script: [create-fpgaconvnet-app-tinyml](https://github.com/mlcommons/cm4mlops/tree/master/script/create-fpgaconvnet-app-tinyml) + - CM script: 
[create-fpgaconvnet-config-tinyml](https://github.com/mlcommons/cm4mlops/tree/master/script/create-fpgaconvnet-config-tinyml) + - CM script: [create-patch](https://github.com/mlcommons/cm4mlops/tree/master/script/create-patch) + - CM script: [destroy-terraform](https://github.com/mlcommons/cm4mlops/tree/master/script/destroy-terraform) + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + - CM script: [detect-sudo](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-sudo) + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + - CM script: [download-torrent](https://github.com/mlcommons/cm4mlops/tree/master/script/download-torrent) + - CM script: [dump-pip-freeze](https://github.com/mlcommons/cm4mlops/tree/master/script/dump-pip-freeze) + - CM script: [extract-file](https://github.com/mlcommons/cm4mlops/tree/master/script/extract-file) + - CM script: [fail](https://github.com/mlcommons/cm4mlops/tree/master/script/fail) + - CM script: [flash-tinyml-binary](https://github.com/mlcommons/cm4mlops/tree/master/script/flash-tinyml-binary) + - CM script: [generate-mlperf-inference-submission](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-submission) + - CM script: [generate-mlperf-inference-user-conf](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-user-conf) + - CM script: [generate-mlperf-tiny-report](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-tiny-report) + - CM script: [generate-mlperf-tiny-submission](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-tiny-submission) + - CM script: 
[generate-nvidia-engine](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-nvidia-engine) + - CM script: [get-android-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-android-sdk) + - CM script: [get-aocl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-aocl) + - CM script: [get-aria2](https://github.com/mlcommons/cm4mlops/tree/master/script/get-aria2) + - CM script: [get-aws-cli](https://github.com/mlcommons/cm4mlops/tree/master/script/get-aws-cli) + - CM script: [get-bazel](https://github.com/mlcommons/cm4mlops/tree/master/script/get-bazel) + - CM script: [get-bert-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-bert-squad-vocab) + - CM script: [get-blis](https://github.com/mlcommons/cm4mlops/tree/master/script/get-blis) + - CM script: [get-brew](https://github.com/mlcommons/cm4mlops/tree/master/script/get-brew) + - CM script: [get-ck](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ck) + - CM script: [get-ck-repo-mlops](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ck-repo-mlops) + - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + - CM script: [get-cmsis_5](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmsis_5) + - CM script: [get-compiler-flags](https://github.com/mlcommons/cm4mlops/tree/master/script/get-compiler-flags) + - CM script: [get-compiler-rust](https://github.com/mlcommons/cm4mlops/tree/master/script/get-compiler-rust) + - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) + - CM script: [get-croissant](https://github.com/mlcommons/cm4mlops/tree/master/script/get-croissant) + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + - CM script: 
[get-cuda-devices](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda-devices) + - CM script: [get-cudnn](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cudnn) + - CM script: [get-dataset-cifar10](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-cifar10) + - CM script: [get-dataset-cnndm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-cnndm) + - CM script: [get-dataset-coco](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-coco) + - CM script: [get-dataset-coco2014](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-coco2014) + - CM script: [get-dataset-criteo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-criteo) + - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) + - CM script: [get-dataset-imagenet-calibration](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-calibration) + - CM script: [get-dataset-imagenet-helper](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-helper) + - CM script: [get-dataset-imagenet-train](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-train) + - CM script: [get-dataset-imagenet-val](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-val) + - CM script: [get-dataset-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-kits19) + - CM script: [get-dataset-librispeech](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-librispeech) + - CM script: [get-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages) + - CM script: [get-dataset-openimages-annotations](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages-annotations) + - CM script: 
[get-dataset-openimages-calibration](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages-calibration) + - CM script: [get-dataset-openorca](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openorca) + - CM script: [get-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad) + - CM script: [get-dataset-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad-vocab) + - CM script: [get-dlrm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dlrm) + - CM script: [get-dlrm-data-mlperf-inference](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dlrm-data-mlperf-inference) + - CM script: [get-docker](https://github.com/mlcommons/cm4mlops/tree/master/script/get-docker) + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + - CM script: [get-github-cli](https://github.com/mlcommons/cm4mlops/tree/master/script/get-github-cli) + - CM script: [get-go](https://github.com/mlcommons/cm4mlops/tree/master/script/get-go) + - CM script: [get-google-saxml](https://github.com/mlcommons/cm4mlops/tree/master/script/get-google-saxml) + - CM script: [get-google-test](https://github.com/mlcommons/cm4mlops/tree/master/script/get-google-test) + - CM script: [get-ipol-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ipol-src) + - CM script: [get-java](https://github.com/mlcommons/cm4mlops/tree/master/script/get-java) + - CM script: [get-javac](https://github.com/mlcommons/cm4mlops/tree/master/script/get-javac) + - CM script: 
[get-lib-armnn](https://github.com/mlcommons/cm4mlops/tree/master/script/get-lib-armnn) + - CM script: [get-lib-dnnl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-lib-dnnl) + - CM script: [get-lib-protobuf](https://github.com/mlcommons/cm4mlops/tree/master/script/get-lib-protobuf) + - CM script: [get-lib-qaic-api](https://github.com/mlcommons/cm4mlops/tree/master/script/get-lib-qaic-api) + - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) + - CM script: [get-microtvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-microtvm) + - CM script: [get-ml-model-3d-unet-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-3d-unet-kits19) + - CM script: [get-ml-model-bert-base-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-base-squad) + - CM script: [get-ml-model-bert-large-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-large-squad) + - CM script: [get-ml-model-dlrm-terabyte](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-dlrm-terabyte) + - CM script: [get-ml-model-efficientnet-lite](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-efficientnet-lite) + - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) + - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo) + - CM script: [get-ml-model-llama2](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-llama2) + - CM script: [get-ml-model-mobilenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-mobilenet) + - CM script: [get-ml-model-neuralmagic-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-neuralmagic-zoo) + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + 
- CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) + - CM script: [get-ml-model-retinanet-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet-nvidia) + - CM script: [get-ml-model-rnnt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-rnnt) + - CM script: [get-ml-model-stable-diffusion](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-stable-diffusion) + - CM script: [get-ml-model-tiny-resnet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-tiny-resnet) + - CM script: [get-ml-model-using-imagenet-from-model-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-using-imagenet-from-model-zoo) + - CM script: [get-mlperf-inference-intel-scratch-space](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-intel-scratch-space) + - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) + - CM script: [get-mlperf-inference-nvidia-common-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-common-code) + - CM script: [get-mlperf-inference-nvidia-scratch-space](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-scratch-space) + - CM script: [get-mlperf-inference-results](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results) + - CM script: [get-mlperf-inference-results-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results-dir) + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + - CM script: [get-mlperf-inference-submission-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-submission-dir) + - CM script: 
[get-mlperf-inference-sut-configs](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-sut-configs) + - CM script: [get-mlperf-inference-sut-description](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-sut-description) + - CM script: [get-mlperf-inference-utils](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-utils) + - CM script: [get-mlperf-logging](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-logging) + - CM script: [get-mlperf-power-dev](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-power-dev) + - CM script: [get-mlperf-tiny-eembc-energy-runner-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-tiny-eembc-energy-runner-src) + - CM script: [get-mlperf-tiny-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-tiny-src) + - CM script: [get-mlperf-training-nvidia-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-nvidia-code) + - CM script: [get-mlperf-training-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-src) + - CM script: [get-nvidia-docker](https://github.com/mlcommons/cm4mlops/tree/master/script/get-nvidia-docker) + - CM script: [get-nvidia-mitten](https://github.com/mlcommons/cm4mlops/tree/master/script/get-nvidia-mitten) + - CM script: [get-onnxruntime-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-onnxruntime-prebuilt) + - CM script: [get-openssl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-openssl) + - CM script: [get-preprocessed-dataset-criteo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-criteo) + - CM script: [get-preprocesser-script-generic](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocesser-script-generic) + - CM script: 
[get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) + - CM script: [get-preprocessed-dataset-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-kits19) + - CM script: [get-preprocessed-dataset-librispeech](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-librispeech) + - CM script: [get-preprocessed-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openimages) + - CM script: [get-preprocessed-dataset-openorca](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openorca) + - CM script: [get-preprocessed-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-squad) + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + - CM script: [get-qaic-apps-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-qaic-apps-sdk) + - CM script: [get-qaic-platform-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-qaic-platform-sdk) + - CM script: [get-qaic-software-kit](https://github.com/mlcommons/cm4mlops/tree/master/script/get-qaic-software-kit) + - CM script: [get-rclone](https://github.com/mlcommons/cm4mlops/tree/master/script/get-rclone) + - CM script: [get-rocm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-rocm) + - CM script: [get-spec-ptd](https://github.com/mlcommons/cm4mlops/tree/master/script/get-spec-ptd) + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + - CM script: [get-sys-utils-min](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-min) + - CM script: [get-tensorrt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tensorrt) + - CM script: 
[get-terraform](https://github.com/mlcommons/cm4mlops/tree/master/script/get-terraform) + - CM script: [get-tvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm) + - CM script: [get-tvm-model](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm-model) + - CM script: [get-xilinx-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-xilinx-sdk) + - CM script: [get-zendnn](https://github.com/mlcommons/cm4mlops/tree/master/script/get-zendnn) + - CM script: [get-zephyr](https://github.com/mlcommons/cm4mlops/tree/master/script/get-zephyr) + - CM script: [get-zephyr-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-zephyr-sdk) + - CM script: [gui](https://github.com/mlcommons/cm4mlops/tree/master/script/gui) + - CM script: [import-mlperf-inference-to-experiment](https://github.com/mlcommons/cm4mlops/tree/master/script/import-mlperf-inference-to-experiment) + - CM script: [import-mlperf-tiny-to-experiment](https://github.com/mlcommons/cm4mlops/tree/master/script/import-mlperf-tiny-to-experiment) + - CM script: [import-mlperf-training-to-experiment](https://github.com/mlcommons/cm4mlops/tree/master/script/import-mlperf-training-to-experiment) + - CM script: [install-aws-cli](https://github.com/mlcommons/cm4mlops/tree/master/script/install-aws-cli) + - CM script: [install-bazel](https://github.com/mlcommons/cm4mlops/tree/master/script/install-bazel) + - CM script: [install-cmake-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/install-cmake-prebuilt) + - CM script: [install-cuda-package-manager](https://github.com/mlcommons/cm4mlops/tree/master/script/install-cuda-package-manager) + - CM script: [install-cuda-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/install-cuda-prebuilt) + - CM script: [install-gcc-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-gcc-src) + - CM script: 
[install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + - CM script: [install-gflags](https://github.com/mlcommons/cm4mlops/tree/master/script/install-gflags) + - CM script: [install-github-cli](https://github.com/mlcommons/cm4mlops/tree/master/script/install-github-cli) + - CM script: [install-ipex-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-ipex-from-src) + - CM script: [install-llvm-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-prebuilt) + - CM script: [install-llvm-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-src) + - CM script: [install-mlperf-logging-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-mlperf-logging-from-src) + - CM script: [install-nccl-libs](https://github.com/mlcommons/cm4mlops/tree/master/script/install-nccl-libs) + - CM script: [install-numactl-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-numactl-from-src) + - CM script: [install-onednn-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-onednn-from-src) + - CM script: [install-onnxruntime-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-onnxruntime-from-src) + - CM script: [install-openssl](https://github.com/mlcommons/cm4mlops/tree/master/script/install-openssl) + - CM script: [install-pip-package-for-cmind-python](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pip-package-for-cmind-python) + - CM script: [install-python-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-python-src) + - CM script: [install-python-venv](https://github.com/mlcommons/cm4mlops/tree/master/script/install-python-venv) + - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src) + - CM script: 
[install-pytorch-kineto-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-kineto-from-src) + - CM script: [install-qaic-compute-sdk-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-qaic-compute-sdk-from-src) + - CM script: [install-rocm](https://github.com/mlcommons/cm4mlops/tree/master/script/install-rocm) + - CM script: [install-tensorflow-for-c](https://github.com/mlcommons/cm4mlops/tree/master/script/install-tensorflow-for-c) + - CM script: [install-tensorflow-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-tensorflow-from-src) + - CM script: [install-terraform-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-terraform-from-src) + - CM script: [install-tflite-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-tflite-from-src) + - CM script: [install-torchvision-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-torchvision-from-src) + - CM script: [install-tpp-pytorch-extension](https://github.com/mlcommons/cm4mlops/tree/master/script/install-tpp-pytorch-extension) + - CM script: [install-transformers-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-transformers-from-src) + - CM script: [launch-benchmark](https://github.com/mlcommons/cm4mlops/tree/master/script/launch-benchmark) + - CM script: [prepare-training-data-bert](https://github.com/mlcommons/cm4mlops/tree/master/script/prepare-training-data-bert) + - CM script: [prepare-training-data-resnet](https://github.com/mlcommons/cm4mlops/tree/master/script/prepare-training-data-resnet) + - CM script: [preprocess-mlperf-inference-submission](https://github.com/mlcommons/cm4mlops/tree/master/script/preprocess-mlperf-inference-submission) + - CM script: [print-croissant-desc](https://github.com/mlcommons/cm4mlops/tree/master/script/print-croissant-desc) + - CM script: 
[print-hello-world](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world) + - CM script: [print-hello-world-java](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world-java) + - CM script: [print-hello-world-javac](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world-javac) + - CM script: [print-hello-world-py](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world-py) + - CM script: [print-python-version](https://github.com/mlcommons/cm4mlops/tree/master/script/print-python-version) + - CM script: [process-ae-users](https://github.com/mlcommons/cm4mlops/tree/master/script/process-ae-users) + - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) + - CM script: [prune-bert-models](https://github.com/mlcommons/cm4mlops/tree/master/script/prune-bert-models) + - CM script: [prune-docker](https://github.com/mlcommons/cm4mlops/tree/master/script/prune-docker) + - CM script: [publish-results-to-dashboard](https://github.com/mlcommons/cm4mlops/tree/master/script/publish-results-to-dashboard) + - CM script: [pull-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/pull-git-repo) + - CM script: [push-csv-to-spreadsheet](https://github.com/mlcommons/cm4mlops/tree/master/script/push-csv-to-spreadsheet) + - CM script: [push-mlperf-inference-results-to-github](https://github.com/mlcommons/cm4mlops/tree/master/script/push-mlperf-inference-results-to-github) + - CM script: [remote-run-commands](https://github.com/mlcommons/cm4mlops/tree/master/script/remote-run-commands) + - CM script: [reproduce-ipol-paper-2022-439](https://github.com/mlcommons/cm4mlops/tree/master/script/reproduce-ipol-paper-2022-439) + - CM script: [reproduce-mlperf-octoml-tinyml-results](https://github.com/mlcommons/cm4mlops/tree/master/script/reproduce-mlperf-octoml-tinyml-results) + - CM script: 
[reproduce-mlperf-training-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/reproduce-mlperf-training-nvidia) + - CM script: [run-docker-container](https://github.com/mlcommons/cm4mlops/tree/master/script/run-docker-container) + - CM script: [run-mlperf-inference-app](https://github.com/mlcommons/cm4mlops/tree/master/script/run-mlperf-inference-app) + - CM script: [run-mlperf-inference-mobilenet-models](https://github.com/mlcommons/cm4mlops/tree/master/script/run-mlperf-inference-mobilenet-models) + - CM script: [run-mlperf-inference-submission-checker](https://github.com/mlcommons/cm4mlops/tree/master/script/run-mlperf-inference-submission-checker) + - CM script: [run-mlperf-power-client](https://github.com/mlcommons/cm4mlops/tree/master/script/run-mlperf-power-client) + - CM script: [run-mlperf-power-server](https://github.com/mlcommons/cm4mlops/tree/master/script/run-mlperf-power-server) + - CM script: [run-mlperf-training-submission-checker](https://github.com/mlcommons/cm4mlops/tree/master/script/run-mlperf-training-submission-checker) + - CM script: [run-python](https://github.com/mlcommons/cm4mlops/tree/master/script/run-python) + - CM script: [run-terraform](https://github.com/mlcommons/cm4mlops/tree/master/script/run-terraform) + - CM script: [save-mlperf-inference-implementation-state](https://github.com/mlcommons/cm4mlops/tree/master/script/save-mlperf-inference-implementation-state) + - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) + - CM script: [set-echo-off-win](https://github.com/mlcommons/cm4mlops/tree/master/script/set-echo-off-win) + - CM script: [set-performance-mode](https://github.com/mlcommons/cm4mlops/tree/master/script/set-performance-mode) + - CM script: [set-sqlite-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/set-sqlite-dir) + - CM script: [set-venv](https://github.com/mlcommons/cm4mlops/tree/master/script/set-venv) + - CM script: 
[tar-my-folder](https://github.com/mlcommons/cm4mlops/tree/master/script/tar-my-folder) + - CM script: [test-deps-conditions](https://github.com/mlcommons/cm4mlops/tree/master/script/test-deps-conditions) + - CM script: [test-download-and-extract-artifacts](https://github.com/mlcommons/cm4mlops/tree/master/script/test-download-and-extract-artifacts) + - CM script: [test-mlperf-inference-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/test-mlperf-inference-retinanet) + - CM script: [test-set-sys-user-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/test-set-sys-user-cm) + - CM script: [truncate-mlperf-inference-accuracy-log](https://github.com/mlcommons/cm4mlops/tree/master/script/truncate-mlperf-inference-accuracy-log) + - CM script: [upgrade-python-pip](https://github.com/mlcommons/cm4mlops/tree/master/script/upgrade-python-pip) + - CM script: [wrapper-reproduce-octoml-tinyml-submission](https://github.com/mlcommons/cm4mlops/tree/master/script/wrapper-reproduce-octoml-tinyml-submission) + +___ +### Script output +`cmr "generate submission mlperf mlperf-tiny tiny mlcommons tiny-submission mlperf-tiny-submission mlcommons-tiny-submission " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/generate-nvidia-engine.md b/docs/MLPerf-benchmark-support/generate-nvidia-engine.md new file mode 100644 index 000000000..ca8540fe6 --- /dev/null +++ b/docs/MLPerf-benchmark-support/generate-nvidia-engine.md @@ -0,0 +1,244 @@ +Automatically generated README for this automation recipe: **generate-nvidia-engine** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=generate-nvidia-engine,0eef9f05b272401f) ]* + +--- + +This CM script is in draft stage + + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-nvidia-engine)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *generate,engine,mlperf,inference,nvidia* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "generate engine mlperf inference nvidia" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=generate,engine,mlperf,inference,nvidia` + +`cm run script --tags=generate,engine,mlperf,inference,nvidia[,variations] [--input_flags]` + +*or* + +`cmr "generate engine mlperf inference nvidia"` + +`cmr "generate engine mlperf inference nvidia [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'generate,engine,mlperf,inference,nvidia', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="generate,engine,mlperf,inference,nvidia"``` + +#### Run this script via Docker (beta) + +`cm docker script "generate engine mlperf inference nvidia[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_batch_size.#` + - Environment variables: + - *CM_MODEL_BATCH_SIZE*: `None` + - Workflow: + * `_copy_streams.#` + - Environment variables: + - *CM_GPU_COPY_STREAMS*: `None` + - Workflow: + * `_cuda` + - Environment variables: + - *CM_MLPERF_DEVICE*: `gpu` + - *CM_MLPERF_DEVICE_LIB_NAMESPEC*: `cudart` + - Workflow: + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *CM_MLPERF_DEVICE*: `cpu` + - Workflow: + +
+ + + * Group "**model**" +
+ Click here to expand this section. + + * **`_resnet50`** (default) + - Environment variables: + - *CM_MODEL*: `resnet50` + - Workflow: + * `_retinanet` + - Environment variables: + - *CM_MODEL*: `retinanet` + - Workflow: + +
+ + +#### Default variations + +`_cpu,_resnet50` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--output_dir=value` → `CM_MLPERF_OUTPUT_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "output_dir":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_BATCH_COUNT: `1` +* CM_BATCH_SIZE: `1` +* CM_LOADGEN_SCENARIO: `Offline` +* CM_GPU_COPY_STREAMS: `1` +* CM_TENSORRT_WORKSPACE_SIZE: `4194304` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-nvidia-engine/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,cuda,_cudnn + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,tensorrt + - CM script: [get-tensorrt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tensorrt) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pycuda + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,loadgen + * CM names: `--adr.['loadgen']...` + - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,nvidia,mlperf,inference,common-code + * CM names: `--adr.['nvidia-inference-common-code']...` + - CM script: [get-mlperf-inference-nvidia-common-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-common-code) + * get,dataset,preprocessed,imagenet,_NCHW + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + * CM names: `--adr.['imagenet-preprocessed']...` + - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) + * get,ml-model,resnet50,_onnx + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + * CM names: `--adr.['ml-model', 'resnet50-model']...` + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + * get,dataset,preprocessed,openimages,_validation,_NCHW + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + * CM names: `--adr.['openimages-preprocessed']...` + - CM script: [get-preprocessed-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openimages) + * get,ml-model,retinanet,_onnx,_fp32 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + * CM names: `--adr.['ml-model', 'retinanet-model']...` + - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-nvidia-engine/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-nvidia-engine/_cm.yaml) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-nvidia-engine/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-nvidia-engine/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-nvidia-engine/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-nvidia-engine/_cm.yaml) + +___ +### Script output +`cmr "generate engine mlperf inference nvidia [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +* `CM_MLPERF_*` +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-intel-scratch-space.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-intel-scratch-space.md new file mode 100644 index 000000000..64717e3ce --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-inference-intel-scratch-space.md @@ -0,0 +1,161 @@ +Automatically generated README for this automation recipe: **get-mlperf-inference-intel-scratch-space** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-intel-scratch-space,e83fca30851f45ef) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,mlperf,inference,intel,scratch,space* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get mlperf inference intel scratch space" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,mlperf,inference,intel,scratch,space` + +`cm run script --tags=get,mlperf,inference,intel,scratch,space[,variations] [--input_flags]` + +*or* + +`cmr "get mlperf inference intel scratch space"` + +`cmr "get mlperf inference intel scratch space [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,mlperf,inference,intel,scratch,space', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,mlperf,inference,intel,scratch,space"``` + +#### Run this script via Docker (beta) + +`cm docker script "get mlperf inference intel scratch space[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**version**" +
+ Click here to expand this section. + + * `_version.#` + - Environment variables: + - *CM_INTEL_SCRATCH_SPACE_VERSION*: `#` + - Workflow: + * **`_version.4_0`** (default) + - Environment variables: + - *CM_INTEL_SCRATCH_SPACE_VERSION*: `4_0` + - Workflow: + +
+ + +#### Default variations + +`_version.4_0` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--scratch_path=value` → `MLPERF_INTEL_SCRATCH_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "scratch_path":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space/_cm.json) + +___ +### Script output +`cmr "get mlperf inference intel scratch space [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_INTEL_MLPERF_SCRATCH_PATH` +* `CM_INTEL_SCRATCH_SPACE_VERSION` +#### New environment keys auto-detected from customize + +* `CM_INTEL_MLPERF_SCRATCH_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-loadgen.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-loadgen.md new file mode 100644 index 000000000..979a870b0 --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-inference-loadgen.md @@ -0,0 +1,224 @@ +Automatically generated README for this automation recipe: **get-mlperf-inference-loadgen** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-loadgen,64c3d98d0ba04950) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *get,loadgen,inference,inference-loadgen,mlperf,mlcommons* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get loadgen inference inference-loadgen mlperf mlcommons" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,loadgen,inference,inference-loadgen,mlperf,mlcommons` + +`cm run script --tags=get,loadgen,inference,inference-loadgen,mlperf,mlcommons[,variations] ` + +*or* + +`cmr "get loadgen inference inference-loadgen mlperf mlcommons"` + +`cmr "get loadgen inference inference-loadgen mlperf mlcommons [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,loadgen,inference,inference-loadgen,mlperf,mlcommons', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,loadgen,inference,inference-loadgen,mlperf,mlcommons"``` + +#### Run this script via Docker (beta) + +`cm docker script "get loadgen inference inference-loadgen mlperf mlcommons[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_copy` + - Workflow: + * `_custom-python` + - Environment variables: + - *CM_TMP_USE_CUSTOM_PYTHON*: `on` + - Workflow: + * `_download` + - Environment variables: + - *CM_DOWNLOAD_CHECKSUM*: `af3f9525965b2c1acc348fb882a5bfd1` + - *CM_MLPERF_INFERENCE_LOADGEN_DOWNLOAD*: `YES` + - *CM_MLPERF_INFERENCE_LOADGEN_DOWNLOAD_URL*: `https://www.dropbox.com/scl/fi/36dgoiur26i2tvwgsaatf/loadgen.zip?rlkey=ab68i7uza9anvaw0hk1xvf0qk&dl=0` + - *CM_MLPERF_INFERENCE_LOADGEN_VERSION*: `v3.1` + - *CM_VERIFY_SSL*: `False` + - Workflow: + * `_download_v3.1` + - Environment variables: + - *CM_DOWNLOAD_CHECKSUM*: `af3f9525965b2c1acc348fb882a5bfd1` + - *CM_MLPERF_INFERENCE_LOADGEN_DOWNLOAD*: `YES` + - *CM_MLPERF_INFERENCE_LOADGEN_DOWNLOAD_URL*: `https://www.dropbox.com/scl/fi/36dgoiur26i2tvwgsaatf/loadgen.zip?rlkey=ab68i7uza9anvaw0hk1xvf0qk&dl=0` + - *CM_MLPERF_INFERENCE_LOADGEN_VERSION*: `v3.1` + - *CM_VERIFY_SSL*: `False` + - Workflow: + * `_download_v4.0` + - Environment variables: + - *CM_DOWNLOAD_CHECKSUM*: `b4d97525d9ad0539a64667f2a3ca20c5` + - *CM_MLPERF_INFERENCE_LOADGEN_DOWNLOAD*: `YES` + - *CM_MLPERF_INFERENCE_LOADGEN_DOWNLOAD_URL*: `https://www.dropbox.com/scl/fi/gk5e9kziju5t56umxyzyx/loadgen.zip?rlkey=vsie4xnzml1inpjplm5cg7t54&dl=0` + - *CM_MLPERF_INFERENCE_LOADGEN_VERSION*: `v4.0` + - *CM_VERIFY_SSL*: `False` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_SHARED_BUILD: `no` + +
+ +#### Versions +Default version: `master` + +* `custom` +* `main` +* `master` +* `pybind_fix` +* `r2.1` +* `r3.0` +* `r3.1` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlcommons,inference,src + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_MLPERF_INFERENCE_LOADGEN_DOWNLOAD': ['YES']}` + * CM names: `--adr.['inference-src-loadgen']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * download-and-extract,file,_wget,_extract + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_INFERENCE_LOADGEN_DOWNLOAD': ['YES']}` + * CM names: `--adr.['inference-src-loadgen-download']...` + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + * get,compiler + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_HOST_OS_TYPE': ['windows']}` + * CM names: `--adr.['compiler']...` + - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) + * get,cl + * Enable this dependency only if all ENV vars are set:
+`{'CM_HOST_OS_TYPE': ['windows']}` + * CM names: `--adr.['compiler']...` + - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) + * get,cmake + * CM names: `--adr.['cmake']...` + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * get,generic-python-lib,_package.wheel + * CM names: `--adr.['pip-package', 'wheel']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pip + * CM names: `--adr.['pip-package', 'pip']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.pybind11 + * CM names: `--adr.['pip-package', 'pybind11']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.setuptools + * CM names: `--adr.['pip-package', 'setuputils']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen/_cm.yaml) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen/_cm.yaml) + 1. 
***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen/_cm.yaml) + +___ +### Script output +`cmr "get loadgen inference inference-loadgen mlperf mlcommons [,variations]" -j` +#### New environment keys (filter) + +* `+CPLUS_INCLUDE_PATH` +* `+C_INCLUDE_PATH` +* `+DYLD_FALLBACK_LIBRARY_PATH` +* `+LD_LIBRARY_PATH` +* `+PYTHONPATH` +* `CM_MLPERF_INFERENCE_LOADGEN_*` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_INFERENCE_LOADGEN_INCLUDE_PATH` +* `CM_MLPERF_INFERENCE_LOADGEN_INSTALL_PATH` +* `CM_MLPERF_INFERENCE_LOADGEN_LIBRARY_PATH` +* `CM_MLPERF_INFERENCE_LOADGEN_PYTHON_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-common-code.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-common-code.md new file mode 100644 index 000000000..81faf43e8 --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-common-code.md @@ -0,0 +1,150 @@ +Automatically generated README for this automation recipe: **get-mlperf-inference-nvidia-common-code** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-nvidia-common-code,26b78bf3ffdc4926) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-common-code)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,nvidia,mlperf,inference,common-code* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get nvidia mlperf inference common-code" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,nvidia,mlperf,inference,common-code` + +`cm run script --tags=get,nvidia,mlperf,inference,common-code[,variations] ` + +*or* + +`cmr "get nvidia mlperf inference common-code"` + +`cmr "get nvidia mlperf inference common-code [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,nvidia,mlperf,inference,common-code', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,nvidia,mlperf,inference,common-code"``` + +#### Run this script via Docker (beta) + +`cm docker script "get nvidia mlperf inference common-code[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**repo-owner**" +
+ Click here to expand this section. + + * `_ctuning` + - Workflow: + * `_custom` + - Workflow: + * `_mlcommons` + - Workflow: + * `_nvidia-only` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `r3.1` + +* `r2.1` +* `r3.0` +* `r3.1` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-common-code/_cm.json)*** + * get,mlperf,inference,results + * CM names: `--adr.['mlperf-inference-results']...` + - CM script: [get-mlperf-inference-results](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results) + - CM script: [get-mlperf-inference-results-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results-dir) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-common-code/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-common-code/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-common-code/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-common-code/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-common-code/_cm.json) + +___ +### Script output +`cmr "get nvidia mlperf inference common-code [,variations]" -j` +#### New environment keys (filter) + +* `+PYTHONPATH` +* `CM_MLPERF_INFERENCE_NVIDIA_CODE_PATH` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_INFERENCE_NVIDIA_CODE_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-scratch-space.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-scratch-space.md new file mode 100644 index 000000000..845f71038 --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-scratch-space.md @@ -0,0 +1,162 @@ +Automatically generated README for this automation recipe: **get-mlperf-inference-nvidia-scratch-space** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-nvidia-scratch-space,0b2bec8b29fb4ab7) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,mlperf,inference,nvidia,scratch,space* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get mlperf inference nvidia scratch space" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,mlperf,inference,nvidia,scratch,space` + +`cm run script --tags=get,mlperf,inference,nvidia,scratch,space[,variations] [--input_flags]` + +*or* + +`cmr "get mlperf inference nvidia scratch space"` + +`cmr "get mlperf inference nvidia scratch space [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,mlperf,inference,nvidia,scratch,space', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,mlperf,inference,nvidia,scratch,space"``` + +#### Run this script via Docker (beta) + +`cm docker script "get mlperf inference nvidia scratch space[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**version**" +
+ Click here to expand this section. + + * `_version.#` + - Environment variables: + - *CM_NVIDIA_SCRATCH_SPACE_VERSION*: `#` + - Workflow: + * **`_version.4_0`** (default) + - Environment variables: + - *CM_NVIDIA_SCRATCH_SPACE_VERSION*: `4_0` + - Workflow: + +
+ + +#### Default variations + +`_version.4_0` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--scratch_path=value` → `CM_NVIDIA_MLPERF_SCRATCH_PATH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "scratch_path":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space/_cm.json) + +___ +### Script output +`cmr "get mlperf inference nvidia scratch space [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_NVIDIA_MLPERF_SCRATCH_PATH` +* `CM_NVIDIA_SCRATCH_SPACE_VERSION` +* `MLPERF_SCRATCH_PATH` +#### New environment keys auto-detected from customize + +* `CM_NVIDIA_MLPERF_SCRATCH_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-results-dir.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-results-dir.md new file mode 100644 index 000000000..13e935209 --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-inference-results-dir.md @@ -0,0 +1,159 @@ +Automatically generated README for this automation recipe: **get-mlperf-inference-results-dir** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-results-dir,84f3c5aad5e1444b) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results-dir)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,mlperf,inference,results,dir,directory* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get mlperf inference results dir directory" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,mlperf,inference,results,dir,directory` + +`cm run script --tags=get,mlperf,inference,results,dir,directory[,variations] [--input_flags]` + +*or* + +`cmr "get mlperf inference results dir directory"` + +`cmr "get mlperf inference results dir directory [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,mlperf,inference,results,dir,directory', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,mlperf,inference,results,dir,directory"``` + +#### Run this script via Docker (beta) + +`cm docker script "get mlperf inference results dir directory[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**version**" +
+ Click here to expand this section. + + * `_version.#` + - Environment variables: + - *CM_MLPERF_INFERENCE_RESULTS_VERSION*: `#` + - Workflow: + * **`_version.4_0`** (default) + - Environment variables: + - *CM_MLPERF_INFERENCE_RESULTS_VERSION*: `4_0` + - Workflow: + +
+ + +#### Default variations + +`_version.4_0` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--results_dir=value` → `CM_MLPERF_INFERENCE_RESULTS_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "results_dir":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results-dir/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results-dir/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results-dir/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results-dir/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results-dir/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results-dir/_cm.json) + +___ +### Script output +`cmr "get mlperf inference results dir directory [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_MLPERF_INFERENCE_RESULTS_DIR` +* `CM_MLPERF_INFERENCE_RESULTS_VERSION` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_INFERENCE_RESULTS_DIR` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-results.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-results.md new file mode 100644 index 000000000..75b92569e --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-inference-results.md @@ -0,0 +1,163 @@ +Automatically generated README for this automation recipe: **get-mlperf-inference-results** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-results,36bae5b25dbe41da) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,results,inference,inference-results,mlcommons,mlperf* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get results inference inference-results mlcommons mlperf" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,results,inference,inference-results,mlcommons,mlperf` + +`cm run script --tags=get,results,inference,inference-results,mlcommons,mlperf[,variations] ` + +*or* + +`cmr "get results inference inference-results mlcommons mlperf"` + +`cmr "get results inference inference-results mlcommons mlperf [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,results,inference,inference-results,mlcommons,mlperf', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,results,inference,inference-results,mlcommons,mlperf"``` + +#### Run this script via Docker (beta) + +`cm docker script "get results inference inference-results mlcommons mlperf[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**source-repo**" +
+ Click here to expand this section. + + * `_ctuning` + - Environment variables: + - *GITHUB_REPO_OWNER*: `ctuning` + - Workflow: + * `_custom` + - Environment variables: + - *GITHUB_REPO_OWNER*: `arjunsuresh` + - Workflow: + * **`_mlcommons`** (default) + - Environment variables: + - *GITHUB_REPO_OWNER*: `mlcommons` + - Workflow: + * `_nvidia-only` + - Environment variables: + - *GITHUB_REPO_OWNER*: `GATEOverflow` + - *NVIDIA_ONLY*: `yes` + - Workflow: + +
+ + +#### Default variations + +`_mlcommons` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_CHECKOUT: `master` +* CM_GIT_DEPTH: `--depth 1` +* CM_GIT_PATCH: `no` + +
+ +#### Versions +Default version: `v3.1` + +* `v2.1` +* `v3.0` +* `v3.1` +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results/_cm.json)*** + * get,git,repo + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results/_cm.json) + +___ +### Script output +`cmr "get results inference inference-results mlcommons mlperf [,variations]" -j` +#### New environment keys (filter) + +* `CM_MLPERF_INFERENCE_RESULTS_*` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_INFERENCE_RESULTS_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-src.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-src.md new file mode 100644 index 000000000..abc42a7a3 --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-inference-src.md @@ -0,0 +1,266 @@ +Automatically generated README for this automation recipe: **get-mlperf-inference-src** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-src,4b57186581024797) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,src,source,inference,inference-src,inference-source,mlperf,mlcommons* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get src source inference inference-src inference-source mlperf mlcommons" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,src,source,inference,inference-src,inference-source,mlperf,mlcommons` + +`cm run script --tags=get,src,source,inference,inference-src,inference-source,mlperf,mlcommons[,variations] ` + +*or* + +`cmr "get src source inference inference-src inference-source mlperf mlcommons"` + +`cmr "get src source inference inference-src inference-source mlperf mlcommons [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,src,source,inference,inference-src,inference-source,mlperf,mlcommons', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,src,source,inference,inference-src,inference-source,mlperf,mlcommons"``` + +#### Run this script via Docker (beta) + +`cm docker script "get src source inference inference-src inference-source mlperf mlcommons[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_3d-unet` + - Environment variables: + - *CM_SUBMODULE_3D_UNET*: `yes` + - Workflow: + * `_deeplearningexamples` + - Environment variables: + - *CM_SUBMODULE_DEEPLEARNINGEXAMPLES*: `yes` + - Workflow: + * `_deepsparse` + - Environment variables: + - *CM_GIT_CHECKOUT*: `deepsparse` + - *CM_GIT_URL*: `https://github.com/neuralmagic/inference` + - *CM_MLPERF_LAST_RELEASE*: `v4.0` + - Workflow: + * `_gn` + - Environment variables: + - *CM_SUBMODULE_GN*: `yes` + - Workflow: + * `_no-recurse-submodules` + - Environment variables: + - *CM_GIT_RECURSE_SUBMODULES*: `` + - Workflow: + * `_nvidia-pycocotools` + - Environment variables: + - *CM_GIT_PATCH_FILENAME*: `coco.patch` + - Workflow: + * `_octoml` + - Environment variables: + - *CM_GIT_URL*: `https://github.com/octoml/inference` + - Workflow: + * `_openimages-nvidia-pycocotools` + - Environment variables: + - *CM_GIT_PATCH_FILENAME*: `openimages-pycocotools.patch` + - Workflow: + * `_patch` + - Environment variables: + - *CM_GIT_PATCH*: `yes` + - Workflow: + * `_pybind` + - Environment variables: + - *CM_SUBMODULE_PYBIND*: `yes` + - Workflow: + * `_recurse-submodules` + - Environment variables: + - *CM_GIT_RECURSE_SUBMODULES*: ` --recurse-submodules` + - Workflow: + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + * `_submodules.#` + - Environment variables: + - *CM_GIT_SUBMODULES*: `#` + - Workflow: + +
+ + + * Group "**checkout**" +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + * `_sha.#` + - Environment variables: + - *CM_GIT_SHA*: `#` + - Workflow: + +
+ + + * Group "**git-history**" +
+ Click here to expand this section. + + * `_full-history` + - Environment variables: + - *CM_GIT_DEPTH*: `` + - Workflow: + * **`_short-history`** (default) + - Environment variables: + - *CM_GIT_DEPTH*: `--depth 10` + - Workflow: + +
+ + +#### Default variations + +`_short-history` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_CHECKOUT_FOLDER: `inference` +* CM_GIT_DEPTH: `--depth 4` +* CM_GIT_PATCH: `no` +* CM_GIT_RECURSE_SUBMODULES: `` +* CM_GIT_URL: `https://github.com/mlcommons/inference.git` + +
+ +#### Versions +Default version: `master` + +* `custom` +* `deepsparse` +* `main` +* `master` +* `pybind_fix` +* `r2.1` +* `r3.0` +* `r3.1` +* `tvm` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-src/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-src/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-src/_cm.json)*** + * get,git,repo + * CM names: `--adr.['inference-git-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-src/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-src/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-src/_cm.json) + +___ +### Script output +`cmr "get src source inference inference-src inference-source mlperf mlcommons [,variations]" -j` +#### New environment keys (filter) + +* `+PYTHONPATH` +* `CM_MLPERF_INFERENCE_3DUNET_PATH` +* `CM_MLPERF_INFERENCE_BERT_PATH` +* `CM_MLPERF_INFERENCE_CLASSIFICATION_AND_DETECTION_PATH` +* `CM_MLPERF_INFERENCE_CONF_PATH` +* `CM_MLPERF_INFERENCE_DLRM_PATH` +* `CM_MLPERF_INFERENCE_DLRM_V2_PATH` +* `CM_MLPERF_INFERENCE_GPTJ_PATH` +* `CM_MLPERF_INFERENCE_RNNT_PATH` +* `CM_MLPERF_INFERENCE_SOURCE` +* `CM_MLPERF_INFERENCE_VERSION` +* `CM_MLPERF_INFERENCE_VISION_PATH` +* `CM_MLPERF_LAST_RELEASE` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_INFERENCE_3DUNET_PATH` +* `CM_MLPERF_INFERENCE_BERT_PATH` +* `CM_MLPERF_INFERENCE_CLASSIFICATION_AND_DETECTION_PATH` +* `CM_MLPERF_INFERENCE_CONF_PATH` +* `CM_MLPERF_INFERENCE_DLRM_PATH` +* `CM_MLPERF_INFERENCE_DLRM_V2_PATH` +* `CM_MLPERF_INFERENCE_GPTJ_PATH` +* `CM_MLPERF_INFERENCE_RNNT_PATH` +* `CM_MLPERF_INFERENCE_VISION_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-submission-dir.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-submission-dir.md new file mode 100644 index 000000000..94a3aa684 --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-inference-submission-dir.md @@ -0,0 +1,159 @@ +Automatically generated README for this automation recipe: **get-mlperf-inference-submission-dir** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-submission-dir,ddf36a41d6934a7e) ]* + +--- +#### 
Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-submission-dir)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,mlperf,inference,submission,dir,directory* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get mlperf inference submission dir directory" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,mlperf,inference,submission,dir,directory` + +`cm run script --tags=get,mlperf,inference,submission,dir,directory[,variations] [--input_flags]` + +*or* + +`cmr "get mlperf inference submission dir directory"` + +`cmr "get mlperf inference submission dir directory [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,mlperf,inference,submission,dir,directory', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,mlperf,inference,submission,dir,directory"``` + +#### Run this script via Docker (beta) + +`cm docker script "get mlperf inference submission dir directory[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**version**" +
+ Click here to expand this section. + + * `_version.#` + - Environment variables: + - *CM_MLPERF_INFERENCE_SUBMISSION_VERSION*: `#` + - Workflow: + * **`_version.4_0`** (default) + - Environment variables: + - *CM_MLPERF_INFERENCE_SUBMISSION_VERSION*: `4_0` + - Workflow: + +
+ + +#### Default variations + +`_version.4_0` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--submission_dir=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "submission_dir":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-submission-dir/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-submission-dir/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-submission-dir/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-submission-dir/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-submission-dir/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-submission-dir/_cm.json) + +___ +### Script output +`cmr "get mlperf inference submission dir directory [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_MLPERF_INFERENCE_SUBMISSION_DIR` +* `CM_MLPERF_INFERENCE_SUBMISSION_VERSION` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_INFERENCE_SUBMISSION_DIR` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-sut-configs.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-sut-configs.md new file mode 100644 index 000000000..9d5a01f19 --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-inference-sut-configs.md @@ -0,0 +1,161 @@ +Automatically generated README for this automation recipe: **get-mlperf-inference-sut-configs** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + 
+--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-sut-configs,c2fbf72009e2445b) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-configs)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,mlperf,inference,sut,configs,sut-configs* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get mlperf inference sut configs sut-configs" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,mlperf,inference,sut,configs,sut-configs` + +`cm run script --tags=get,mlperf,inference,sut,configs,sut-configs[,variations] [--input_flags]` + +*or* + +`cmr "get mlperf inference sut configs sut-configs"` + +`cmr "get mlperf inference sut configs sut-configs [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,mlperf,inference,sut,configs,sut-configs', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,mlperf,inference,sut,configs,sut-configs"``` + +#### Run this script via Docker (beta) + +`cm docker script "get mlperf inference sut configs sut-configs[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_octoml` + - Environment variables: + - *CM_SUT_USE_EXTERNAL_CONFIG_REPO*: `yes` + - *CM_GIT_CHECKOUT_FOLDER*: `configs` + - *CM_GIT_URL*: `https://github.com/arjunsuresh/mlperf-inference-configs` + - Workflow: + 1. ***Read "prehook_deps" on other CM scripts*** + * get,git,repo,_repo.mlperf_inference_configs_octoml + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--configs_git_url=value` → `CM_GIT_URL=value` +* `--repo_path=value` → `CM_SUT_CONFIGS_PATH=value` +* `--run_config=value` → `CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "configs_git_url":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_SUT_CONFIGS_PATH: `` +* CM_GIT_URL: `` + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-configs/_cm.json) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-configs/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-configs/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-configs/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-configs/_cm.json) + +___ +### Script output +`cmr "get mlperf inference sut configs sut-configs [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_HW_*` +* `CM_SUT_*` +#### New environment keys auto-detected from customize + +* `CM_HW_NAME` +* `CM_SUT_NAME` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-sut-description.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-sut-description.md new file mode 100644 index 000000000..4f2f559a5 --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-inference-sut-description.md @@ -0,0 +1,159 @@ +Automatically generated README for this automation recipe: **get-mlperf-inference-sut-description** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-sut-description,e49a3f758b2d4e7b) ]* + 
+--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-description)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,mlperf,sut,description,system-under-test,system-description* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get mlperf sut description system-under-test system-description" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,mlperf,sut,description,system-under-test,system-description` + +`cm run script --tags=get,mlperf,sut,description,system-under-test,system-description [--input_flags]` + +*or* + +`cmr "get mlperf sut description system-under-test system-description"` + +`cmr "get mlperf sut description system-under-test system-description " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,mlperf,sut,description,system-under-test,system-description', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,mlperf,sut,description,system-under-test,system-description"``` + +#### Run this script via Docker (beta) + +`cm docker script "get mlperf sut description system-under-test system-description" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--name=value` → `CM_HW_NAME=value` +* `--submitter=value` → `CM_MLPERF_SUBMITTER=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "name":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_SUT_DESC_CACHE: `no` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-description/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,compiler + * CM names: `--adr.['compiler']...` + - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) + * get,cuda-devices + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_DEVICE': ['gpu', 'cuda']}` + - CM script: [get-cuda-devices](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda-devices) + * detect,sudo + * Enable this dependency only if all ENV vars are set:
+`{'CM_DETERMINE_MEMORY_CONFIGURATION': ['yes'], 'CM_HOST_OS_TYPE': ['linux']}` + - CM script: [detect-sudo](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-sudo) + * get,generic-python-lib,_package.dmiparser + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-description/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-description/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-description/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-description/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-description/_cm.json) + +___ +### Script output +`cmr "get mlperf sut description system-under-test system-description " [--input_flags] -j` +#### New environment keys (filter) + +* `CM_HW_*` +* `CM_SUT_*` +#### New environment keys auto-detected from customize + +* `CM_HW_NAME` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-logging.md b/docs/MLPerf-benchmark-support/get-mlperf-logging.md new file mode 100644 index 000000000..02dea1217 --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-logging.md @@ -0,0 +1,127 @@ +Automatically generated README for this automation recipe: **get-mlperf-logging** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-logging,c9830dc6f87b4dc6) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-logging)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,mlperf,logging,mlperf-logging* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get mlperf logging mlperf-logging" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,mlperf,logging,mlperf-logging` + +`cm run script --tags=get,mlperf,logging,mlperf-logging ` + +*or* + +`cmr "get mlperf logging mlperf-logging"` + +`cmr "get mlperf logging mlperf-logging " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,mlperf,logging,mlperf-logging', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,mlperf,logging,mlperf-logging"``` + +#### Run this script via Docker (beta) + +`cm docker script "get mlperf logging mlperf-logging" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-logging/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,git,repo,_repo.https://github.com/mlcommons/logging + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-logging/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-logging/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-logging/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-logging/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-logging/_cm.json) + +___ +### Script output +`cmr "get mlperf logging mlperf-logging " -j` +#### New environment keys (filter) + +* `+PYTHONPATH` +* `CM_MLPERF_LOGGING_*` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_LOGGING_SRC_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-power-dev.md b/docs/MLPerf-benchmark-support/get-mlperf-power-dev.md new file mode 100644 index 000000000..0df4b325b --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-power-dev.md @@ -0,0 +1,171 @@ +Automatically generated README for this automation recipe: **get-mlperf-power-dev** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-power-dev,72aa56768c994bcf) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-power-dev)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,src,source,power,power-dev,mlperf,mlcommons* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get src source power power-dev mlperf mlcommons" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,src,source,power,power-dev,mlperf,mlcommons` + +`cm run script --tags=get,src,source,power,power-dev,mlperf,mlcommons[,variations] ` + +*or* + +`cmr "get src source power power-dev mlperf mlcommons"` + +`cmr "get src source power power-dev mlperf mlcommons [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,src,source,power,power-dev,mlperf,mlcommons', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,src,source,power,power-dev,mlperf,mlcommons"``` + +#### Run this script via Docker (beta) + +`cm docker script "get src source power power-dev mlperf mlcommons[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**checkout**" +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + * `_sha.#` + - Environment variables: + - *CM_GIT_SHA*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ + + * Group "**repo**" +
+ Click here to expand this section. + + * **`_mlcommons`** (default) + - Environment variables: + - *CM_GIT_URL*: `https://github.com/mlcommons/power-dev.git` + - Workflow: + * `_octoml` + - Environment variables: + - *CM_GIT_URL*: `https://github.com/octoml/power-dev.git` + - Workflow: + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + +
+ + +#### Default variations + +`_mlcommons` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_DEPTH: `--depth 1` +* CM_GIT_PATCH: `no` +* CM_GIT_CHECKOUT_FOLDER: `power-dev` + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-power-dev/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-power-dev/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-power-dev/_cm.json)*** + * get,git,repo + * CM names: `--adr.['mlperf-power-dev-git-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-power-dev/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-power-dev/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-power-dev/_cm.json) + +___ +### Script output +`cmr "get src source power power-dev mlperf mlcommons [,variations]" -j` +#### New environment keys (filter) + +* `CM_MLPERF_POWER_SOURCE` +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/get-mlperf-tiny-eembc-energy-runner-src.md b/docs/MLPerf-benchmark-support/get-mlperf-tiny-eembc-energy-runner-src.md new file mode 100644 index 000000000..f06d6d2aa --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-tiny-eembc-energy-runner-src.md @@ -0,0 +1,129 @@ +Automatically generated README for this automation recipe: **get-mlperf-tiny-eembc-energy-runner-src** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info 
and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-tiny-eembc-energy-runner-src,c7da8d1ce4164a4b) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,src,source,eembc,energyrunner,energy-runner,eembc-energy-runner,tinymlperf-energy-runner* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get src source eembc energyrunner energy-runner eembc-energy-runner tinymlperf-energy-runner" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,src,source,eembc,energyrunner,energy-runner,eembc-energy-runner,tinymlperf-energy-runner` + +`cm run script --tags=get,src,source,eembc,energyrunner,energy-runner,eembc-energy-runner,tinymlperf-energy-runner ` + +*or* + +`cmr "get src source eembc energyrunner energy-runner eembc-energy-runner tinymlperf-energy-runner"` + +`cmr "get src source eembc energyrunner energy-runner eembc-energy-runner tinymlperf-energy-runner " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,src,source,eembc,energyrunner,energy-runner,eembc-energy-runner,tinymlperf-energy-runner', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,src,source,eembc,energyrunner,energy-runner,eembc-energy-runner,tinymlperf-energy-runner"``` + +#### Run this script via Docker (beta) + +`cm docker script "get src source eembc energyrunner energy-runner eembc-energy-runner tinymlperf-energy-runner" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_CHECKOUT: `main` +* CM_GIT_PATCH: `no` +* CM_GIT_RECURSE_SUBMODULES: `` +* CM_GIT_URL: `https://github.com/eembc/energyrunner` + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src/_cm.json) + +___ +### Script output +`cmr "get src source eembc energyrunner energy-runner eembc-energy-runner tinymlperf-energy-runner " -j` +#### New environment keys (filter) + +* `+PYTHONPATH` +* `CM_EEMBC_ENERGY_RUNNER_*` +#### New environment keys auto-detected from customize + +* `CM_EEMBC_ENERGY_RUNNER_DATASETS` +* `CM_EEMBC_ENERGY_RUNNER_SESSIONS` +* `CM_EEMBC_ENERGY_RUNNER_SRC` +* `CM_EEMBC_ENERGY_RUNNER_SRC_DATASETS` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-tiny-src.md b/docs/MLPerf-benchmark-support/get-mlperf-tiny-src.md new file mode 100644 index 000000000..7706dac15 --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-tiny-src.md @@ -0,0 +1,143 @@ +Automatically generated README for this automation recipe: **get-mlperf-tiny-src** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-tiny-src,777843a0bb034524) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,src,source,tiny,tiny-src,tiny-source,tinymlperf,tinymlperf-src,mlperf,mlcommons* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get src source tiny tiny-src tiny-source tinymlperf tinymlperf-src mlperf mlcommons" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,src,source,tiny,tiny-src,tiny-source,tinymlperf,tinymlperf-src,mlperf,mlcommons` + +`cm run script --tags=get,src,source,tiny,tiny-src,tiny-source,tinymlperf,tinymlperf-src,mlperf,mlcommons ` + +*or* + +`cmr "get src source tiny tiny-src tiny-source tinymlperf tinymlperf-src mlperf mlcommons"` + +`cmr "get src source tiny tiny-src tiny-source tinymlperf tinymlperf-src mlperf mlcommons " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,src,source,tiny,tiny-src,tiny-source,tinymlperf,tinymlperf-src,mlperf,mlcommons', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,src,source,tiny,tiny-src,tiny-source,tinymlperf,tinymlperf-src,mlperf,mlcommons"``` + +#### Run this script via Docker (beta) + +`cm docker script "get src source tiny tiny-src tiny-source tinymlperf tinymlperf-src mlperf mlcommons" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_CHECKOUT: `master` +* CM_GIT_PATCH: `no` +* CM_GIT_RECURSE_SUBMODULES: `` +* CM_GIT_URL: `https://github.com/mlcommons/tiny.git` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src/_cm.json) + +___ +### Script output +`cmr "get src source tiny tiny-src tiny-source tinymlperf tinymlperf-src mlperf mlcommons " -j` +#### New environment keys (filter) + +* `+PYTHONPATH` +* `CM_MLPERF_TINY_*` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_TINY_BENCHMARK` +* `CM_MLPERF_TINY_DATASETS` +* `CM_MLPERF_TINY_DATASETS_AD` +* `CM_MLPERF_TINY_DATASETS_IC` +* `CM_MLPERF_TINY_DATASETS_KWS` +* `CM_MLPERF_TINY_DATASETS_KWS_OPEN` +* `CM_MLPERF_TINY_DATASETS_VWW` +* `CM_MLPERF_TINY_SRC` +* `CM_MLPERF_TINY_TRAINING` +* `CM_MLPERF_TINY_TRAINING_AD` +* `CM_MLPERF_TINY_TRAINING_IC` +* `CM_MLPERF_TINY_TRAINING_KWS` +* `CM_MLPERF_TINY_TRAINING_VWW` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-training-nvidia-code.md b/docs/MLPerf-benchmark-support/get-mlperf-training-nvidia-code.md new file mode 100644 index 000000000..e29373502 --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-training-nvidia-code.md @@ -0,0 +1,158 @@ +Automatically generated README for this automation recipe: **get-mlperf-training-nvidia-code** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-training-nvidia-code,fdc630b1d41743c5) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-nvidia-code)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): 
*get,nvidia,mlperf,training,code,training-code* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get nvidia mlperf training code training-code" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,nvidia,mlperf,training,code,training-code` + +`cm run script --tags=get,nvidia,mlperf,training,code,training-code[,variations] ` + +*or* + +`cmr "get nvidia mlperf training code training-code"` + +`cmr "get nvidia mlperf training code training-code [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,nvidia,mlperf,training,code,training-code', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,nvidia,mlperf,training,code,training-code"``` + +#### Run this script via Docker (beta) + +`cm docker script "get nvidia mlperf training code training-code[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**repo-owner**" +
+ Click here to expand this section. + + * `_ctuning` + - Environment variables: + - *CM_TMP_TRAINING_SRC*: `ctuning` + - Workflow: + * `_custom` + - Workflow: + * **`_mlcommons`** (default) + - Environment variables: + - *CM_TMP_TRAINING_SRC*: `mlcommons` + - Workflow: + * `_nvidia-only` + - Environment variables: + - *CM_TMP_TRAINING_SRC*: `GATEOverflow` + - Workflow: + +
+ + +#### Default variations + +`_mlcommons` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `r3.0` + +* `r2.1` +* `r3.0` +* `r3.1` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-nvidia-code/_cm.json)*** + * get,git,repo + * CM names: `--adr.['mlperf-training-results']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-nvidia-code/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-nvidia-code/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-nvidia-code/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-nvidia-code/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-nvidia-code/_cm.json) + +___ +### Script output +`cmr "get nvidia mlperf training code training-code [,variations]" -j` +#### New environment keys (filter) + +* `CM_MLPERF_TRAINING_NVIDIA_CODE_PATH` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_TRAINING_NVIDIA_CODE_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-training-src.md b/docs/MLPerf-benchmark-support/get-mlperf-training-src.md new file mode 100644 index 000000000..aaecc7818 --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-mlperf-training-src.md @@ -0,0 +1,224 @@ +Automatically generated README for this automation recipe: **get-mlperf-training-src** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-training-src,dc440bd88e794a28) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,src,source,training,training-src,training-source,mlperf,mlcommons* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get src source training training-src training-source mlperf mlcommons" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,src,source,training,training-src,training-source,mlperf,mlcommons` + +`cm run script --tags=get,src,source,training,training-src,training-source,mlperf,mlcommons[,variations] ` + +*or* + +`cmr "get src source training training-src training-source mlperf mlcommons"` + +`cmr "get src source training training-src training-source mlperf mlcommons [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,src,source,training,training-src,training-source,mlperf,mlcommons', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,src,source,training,training-src,training-source,mlperf,mlcommons"``` + +#### Run this script via Docker (beta) + +`cm docker script "get src source training training-src training-source mlperf mlcommons[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_no-recurse-submodules` + - Environment variables: + - *CM_GIT_RECURSE_SUBMODULES*: `` + - Workflow: + * `_nvidia-retinanet` + - Environment variables: + - *CM_GIT_PATCH_FILENAMES*: `nvidia-retinanet.patch,cpu_load.patch` + - Workflow: + * `_patch` + - Environment variables: + - *CM_GIT_PATCH*: `yes` + - Workflow: + +
+ + + * Group "**checkout**" +
+ Click here to expand this section. + + * `_branch.#` + - Environment variables: + - *CM_GIT_CHECKOUT*: `#` + - Workflow: + * `_sha.#` + - Environment variables: + - *CM_GIT_SHA*: `#` + - Workflow: + * `_tag.#` + - Environment variables: + - *CM_GIT_CHECKOUT_TAG*: `#` + - Workflow: + +
+ + + * Group "**git-history**" +
+ Click here to expand this section. + + * `_full-history` + - Environment variables: + - *CM_GIT_DEPTH*: `` + - Workflow: + * **`_short-history`** (default) + - Environment variables: + - *CM_GIT_DEPTH*: `--depth 5` + - Workflow: + +
+ + + * Group "**repo**" +
+ Click here to expand this section. + + * `_repo.#` + - Environment variables: + - *CM_GIT_URL*: `#` + - Workflow: + +
+ + + * Group "**src**" +
+ Click here to expand this section. + + * **`_cknowledge`** (default) + - Environment variables: + - *CM_GIT_URL*: `https://github.com/cknowledge/training.git` + - Workflow: + * `_mlcommons` + - Environment variables: + - *CM_GIT_URL*: `https://github.com/mlcommons/training.git` + - Workflow: + +
+ + +#### Default variations + +`_cknowledge,_short-history` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_CHECKOUT: `master` +* CM_GIT_DEPTH: `--depth 4` +* CM_GIT_PATCH: `no` +* CM_GIT_RECURSE_SUBMODULES: ` --recurse-submodules` +* CM_GIT_CHECKOUT_FOLDER: `training` + +
+ +#### Versions +Default version: `master` + +* `custom` +* `master` +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-src/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-src/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-src/_cm.json)*** + * get,git,repo + * CM names: `--adr.['mlperf-training-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-src/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-src/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-src/_cm.json) + +___ +### Script output +`cmr "get src source training training-src training-source mlperf mlcommons [,variations]" -j` +#### New environment keys (filter) + +* `+PYTHONPATH` +* `CM_MLPERF_TRAINING_*` +* `CM_MLPERF_TRAINING_LAST_RELEASE` +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/get-nvidia-mitten.md b/docs/MLPerf-benchmark-support/get-nvidia-mitten.md new file mode 100644 index 000000000..f6467da3f --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-nvidia-mitten.md @@ -0,0 +1,132 @@ +Automatically generated README for this automation recipe: **get-nvidia-mitten** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-nvidia-mitten,1c045f2902374de9) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,nvidia,mitten,nvidia-mitten* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get nvidia mitten nvidia-mitten" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,nvidia,mitten,nvidia-mitten` + +`cm run script --tags=get,nvidia,mitten,nvidia-mitten ` + +*or* + +`cmr "get nvidia mitten nvidia-mitten"` + +`cmr "get nvidia mitten nvidia-mitten " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,nvidia,mitten,nvidia-mitten', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,nvidia,mitten,nvidia-mitten"``` + +#### Run this script via Docker (beta) + +`cm docker script "get nvidia mitten nvidia-mitten" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `master` + +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_pycuda + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,git,_repo.https://github.com/NVIDIA/mitten + * CM names: `--adr.['nvidia-mitten-git-src']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten/_cm.json) + +___ +### Script output +`cmr "get nvidia mitten nvidia-mitten " -j` +#### New environment keys (filter) + +* `CM_NVIDIA_MITTEN*` +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/get-spec-ptd.md b/docs/MLPerf-benchmark-support/get-spec-ptd.md new file mode 100644 index 000000000..5c2797227 --- /dev/null +++ b/docs/MLPerf-benchmark-support/get-spec-ptd.md @@ -0,0 +1,164 @@ +Automatically generated README for this automation recipe: **get-spec-ptd** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-spec-ptd,7423a878e4524136) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-spec-ptd)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,spec,ptd,ptdaemon,power,daemon,power-daemon,mlperf,mlcommons* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get spec ptd ptdaemon power daemon power-daemon mlperf mlcommons" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,spec,ptd,ptdaemon,power,daemon,power-daemon,mlperf,mlcommons` + +`cm run script --tags=get,spec,ptd,ptdaemon,power,daemon,power-daemon,mlperf,mlcommons [--input_flags]` + +*or* + +`cmr "get spec ptd ptdaemon power daemon power-daemon mlperf mlcommons"` + +`cmr "get spec ptd ptdaemon power daemon power-daemon mlperf mlcommons " [--input_flags]` + + + +#### Input Flags + +* --**input**=Path to SPEC PTDaemon (Optional) + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "input":...} +``` +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,spec,ptd,ptdaemon,power,daemon,power-daemon,mlperf,mlcommons', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,spec,ptd,ptdaemon,power,daemon,power-daemon,mlperf,mlcommons"``` + +#### Run this script via Docker (beta) + +`cm docker script "get spec ptd ptdaemon power daemon power-daemon mlperf mlcommons" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--input=value` → `CM_INPUT=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "input":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_GIT_CHECKOUT: `main` +* CM_GIT_DEPTH: `--depth 1` +* CM_GIT_PATCH: `no` +* CM_GIT_RECURSE_SUBMODULES: ` ` +* CM_GIT_URL: `https://github.com/mlcommons/power.git` + +
+ +#### Versions +Default version: `main` + +* `custom` +* `main` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-spec-ptd/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,git,repo,_repo.https://github.com/mlcommons/power + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-spec-ptd/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-spec-ptd/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-spec-ptd/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-spec-ptd/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-spec-ptd/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-spec-ptd/_cm.json) + +___ +### Script output +`cmr "get spec ptd ptdaemon power daemon power-daemon mlperf mlcommons " [--input_flags] -j` +#### New environment keys (filter) + +* `CM_MLPERF_PTD_PATH` +* `CM_SPEC_PTD_PATH` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_PTD_PATH` +* `CM_SPEC_PTD_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/import-mlperf-inference-to-experiment.md b/docs/MLPerf-benchmark-support/import-mlperf-inference-to-experiment.md new file mode 100644 index 000000000..f7708790e --- /dev/null +++ b/docs/MLPerf-benchmark-support/import-mlperf-inference-to-experiment.md @@ -0,0 +1,152 @@ +Automatically generated README for this automation recipe: **import-mlperf-inference-to-experiment** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Developers: [Grigori Fursin](https://cKnowledge.org/gfursin) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=import-mlperf-inference-to-experiment,72099fa962ea499c) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-inference-to-experiment)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *import,mlperf,inference,mlperf-inference,experiment,2experiment,to-experiment* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "import mlperf inference mlperf-inference experiment 2experiment to-experiment" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=import,mlperf,inference,mlperf-inference,experiment,2experiment,to-experiment` + +`cm run script --tags=import,mlperf,inference,mlperf-inference,experiment,2experiment,to-experiment[,variations] [--input_flags]` + +*or* + +`cmr "import mlperf inference mlperf-inference experiment 2experiment to-experiment"` + +`cmr "import mlperf inference mlperf-inference experiment 2experiment to-experiment [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'import,mlperf,inference,mlperf-inference,experiment,2experiment,to-experiment', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="import,mlperf,inference,mlperf-inference,experiment,2experiment,to-experiment"``` + +#### Run this script via Docker (beta) + +`cm docker script "import mlperf inference mlperf-inference experiment 2experiment to-experiment[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_skip_checker` + - Environment variables: + - *CM_SKIP_SUBMISSION_CHECKER*: `True` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--submitter=value` → `CM_MLPERF_SUBMITTER=value` +* `--target_repo=value` → `CM_IMPORT_MLPERF_INFERENCE_TARGET_REPO=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "submitter":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-inference-to-experiment/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-inference-to-experiment/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-inference-to-experiment/_cm.yaml) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-inference-to-experiment/_cm.yaml) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-inference-to-experiment/_cm.yaml) + +___ +### Script output +`cmr "import mlperf inference mlperf-inference experiment 2experiment to-experiment [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/import-mlperf-tiny-to-experiment.md b/docs/MLPerf-benchmark-support/import-mlperf-tiny-to-experiment.md new file mode 100644 index 000000000..99d55bc71 --- /dev/null +++ b/docs/MLPerf-benchmark-support/import-mlperf-tiny-to-experiment.md @@ -0,0 +1,135 @@ +Automatically generated README for this automation recipe: **import-mlperf-tiny-to-experiment** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Developers: [Grigori Fursin](https://cKnowledge.org/gfursin) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=import-mlperf-tiny-to-experiment,83e3efd7611f469b) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-tiny-to-experiment)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *import,mlperf,tiny,mlperf-tiny,experiment,2experiment,to-experiment* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "import mlperf tiny mlperf-tiny experiment 2experiment to-experiment" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=import,mlperf,tiny,mlperf-tiny,experiment,2experiment,to-experiment` + +`cm run script --tags=import,mlperf,tiny,mlperf-tiny,experiment,2experiment,to-experiment [--input_flags]` + +*or* + +`cmr "import mlperf tiny mlperf-tiny experiment 2experiment to-experiment"` + +`cmr "import mlperf tiny mlperf-tiny experiment 2experiment to-experiment " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'import,mlperf,tiny,mlperf-tiny,experiment,2experiment,to-experiment', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="import,mlperf,tiny,mlperf-tiny,experiment,2experiment,to-experiment"``` + +#### Run this script via Docker (beta) + +`cm docker script "import mlperf tiny mlperf-tiny experiment 2experiment to-experiment" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--target_repo=value` → `CM_IMPORT_TINYMLPERF_TARGET_REPO=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "target_repo":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-tiny-to-experiment/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-tiny-to-experiment/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-tiny-to-experiment/_cm.yaml) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-tiny-to-experiment/_cm.yaml) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-tiny-to-experiment/_cm.yaml) + +___ +### Script output +`cmr "import mlperf tiny mlperf-tiny experiment 2experiment to-experiment " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/import-mlperf-training-to-experiment.md b/docs/MLPerf-benchmark-support/import-mlperf-training-to-experiment.md new file mode 100644 index 000000000..e5f76bed7 --- /dev/null +++ b/docs/MLPerf-benchmark-support/import-mlperf-training-to-experiment.md @@ -0,0 +1,141 @@ +Automatically generated README for this automation recipe: **import-mlperf-training-to-experiment** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Developers: [Grigori Fursin](https://cKnowledge.org/gfursin) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=import-mlperf-training-to-experiment,b13d9b7337414f17) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-training-to-experiment)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *import,mlperf,training,mlperf-training,experiment,2experiment,to-experiment* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "import mlperf training mlperf-training experiment 2experiment to-experiment" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=import,mlperf,training,mlperf-training,experiment,2experiment,to-experiment` + +`cm run script --tags=import,mlperf,training,mlperf-training,experiment,2experiment,to-experiment [--input_flags]` + +*or* + +`cmr "import mlperf training mlperf-training experiment 2experiment to-experiment"` + +`cmr "import mlperf training mlperf-training experiment 2experiment to-experiment " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'import,mlperf,training,mlperf-training,experiment,2experiment,to-experiment', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="import,mlperf,training,mlperf-training,experiment,2experiment,to-experiment"``` + +#### Run this script via Docker (beta) + +`cm docker script "import mlperf training mlperf-training experiment 2experiment to-experiment" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--target_repo=value` → `CM_IMPORT_MLPERF_TRAINING_TARGET_REPO=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "target_repo":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-training-to-experiment/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlperf,logging + - CM script: [get-mlperf-logging](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-logging) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-training-to-experiment/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-training-to-experiment/_cm.yaml) + 1. ***Run native script if exists*** + * [run_mlperf_logger.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-training-to-experiment/run_mlperf_logger.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-training-to-experiment/_cm.yaml) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-training-to-experiment/_cm.yaml) + +___ +### Script output +`cmr "import mlperf training mlperf-training experiment 2experiment to-experiment " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/install-mlperf-logging-from-src.md b/docs/MLPerf-benchmark-support/install-mlperf-logging-from-src.md new file mode 100644 index 000000000..885a883f2 --- /dev/null +++ b/docs/MLPerf-benchmark-support/install-mlperf-logging-from-src.md @@ -0,0 +1,126 @@ +Automatically generated README for this automation recipe: **install-mlperf-logging-from-src** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-mlperf-logging-from-src,f67cb84a5dc942c3) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-mlperf-logging-from-src)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *install,mlperf,logging,from.src* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install mlperf logging from.src" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,mlperf,logging,from.src` + +`cm run script --tags=install,mlperf,logging,from.src ` + +*or* + +`cmr "install mlperf logging from.src"` + +`cmr "install mlperf logging from.src " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'install,mlperf,logging,from.src', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,mlperf,logging,from.src"``` + +#### Run this script via Docker (beta) + +`cm docker script "install mlperf logging from.src" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +* `master` +* `v3.1` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-mlperf-logging-from-src/_cm.yaml)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,git,repo,_repo.https://github.com/mlcommons/logging + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-mlperf-logging-from-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-mlperf-logging-from-src/_cm.yaml) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-mlperf-logging-from-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-mlperf-logging-from-src/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-mlperf-logging-from-src/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-mlperf-logging-from-src/_cm.yaml) + +___ +### Script output +`cmr "install mlperf logging from.src " -j` +#### New environment keys (filter) + +* `CM_MLPERF_LOGGING_REPO_PATH` +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/prepare-training-data-bert.md b/docs/MLPerf-benchmark-support/prepare-training-data-bert.md new file mode 100644 index 000000000..fc0386cbd --- /dev/null +++ b/docs/MLPerf-benchmark-support/prepare-training-data-bert.md @@ -0,0 +1,193 @@ +Automatically generated README for this automation recipe: **prepare-training-data-bert** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=prepare-training-data-bert,1e06a7abe23545eb) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *prepare,mlperf,training,data,input,bert* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "prepare mlperf training data input bert" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=prepare,mlperf,training,data,input,bert` + +`cm run script --tags=prepare,mlperf,training,data,input,bert[,variations] [--input_flags]` + +*or* + +`cmr "prepare mlperf training data input bert"` + +`cmr "prepare mlperf training data input bert [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'prepare,mlperf,training,data,input,bert', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="prepare,mlperf,training,data,input,bert"``` + +#### Run this script via Docker (beta) + +`cm docker script "prepare mlperf training data input bert[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**implementation**" +
+ Click here to expand this section. + + * **`_nvidia`** (default) + - Environment variables: + - *CM_TMP_VARIATION*: `nvidia` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,git,repo,_repo.https://github.com/wchen61/training_results_v2.1,_branch.fix_bert_prepare_data + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + * `_reference` + - Environment variables: + - *CM_TMP_VARIATION*: `reference` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,mlperf,training,src + * CM names: `--adr.['mlperf-training-src']...` + - CM script: [get-mlperf-training-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-src) + * get,python3 + * CM names: `--adr.['python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_tensorflow + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_protobuf + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + +#### Default variations + +`_nvidia` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--clean=value` → `CM_MLPERF_TRAINING_CLEAN_TFRECORDS=value` +* `--data_dir=value` → `CM_DATA_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "clean":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/_cm.json)*** + * download,file,_gdown,_url.https://drive.google.com/uc?id=1fbGClQMi2CoMv7fwrwTC5YYPooQBdcFW + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + * download,file,_gdown,_url.https://drive.google.com/uc?id=1USK108J6hMM_d27xCHi738qBL8_BT1u1 + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + * download,file,_gdown,_url.https://drive.google.com/uc?id=1tmMgLwoBvbEJEHXh77sqrXYw5RpqT8R_ + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + * download-and-extract,file,_gdown,_extract,_url.https://drive.google.com/uc?id=14xV2OUGSQDG_yDBrmbSdcDC-QGeqpfs_ + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + * download,file,_gdown,_url.https://drive.google.com/uc?id=1chiTBljF0Eh1U5pKs6ureVHgSbtU8OG_ + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + * download,file,_gdown,_url.https://drive.google.com/uc?id=1Q47V3K3jFRkbJ2zGCrKkKk-n0fvMZsa0 + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + * download,file,_gdown,_url.https://drive.google.com/uc?id=1vAcVmXSLsLeQ1q7gvHnQUSth5W_f_pwv + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + 1. 
***Run native script if exists*** + * [run-nvidia.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/run-nvidia.sh) + * [run-reference.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/run-reference.sh) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/_cm.json) + +___ +### Script output +`cmr "prepare mlperf training data input bert [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_MLPERF_TRAINING_BERT_*` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_TRAINING_BERT_CONFIG_PATH` +* `CM_MLPERF_TRAINING_BERT_DATA_PATH` +* `CM_MLPERF_TRAINING_BERT_TFRECORDS_PATH` +* `CM_MLPERF_TRAINING_BERT_VOCAB_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/prepare-training-data-resnet.md b/docs/MLPerf-benchmark-support/prepare-training-data-resnet.md new file mode 100644 index 000000000..1b2907e22 --- /dev/null +++ b/docs/MLPerf-benchmark-support/prepare-training-data-resnet.md @@ -0,0 +1,206 @@ +Automatically generated README for this automation recipe: **prepare-training-data-resnet** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=prepare-training-data-resnet,d42a8a8ca2704f9f) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *prepare,mlperf,training,data,input,resnet* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "prepare mlperf training data input resnet" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=prepare,mlperf,training,data,input,resnet` + +`cm run script --tags=prepare,mlperf,training,data,input,resnet[,variations] [--input_flags]` + +*or* + +`cmr "prepare mlperf training data input resnet"` + +`cmr "prepare mlperf training data input resnet [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'prepare,mlperf,training,data,input,resnet', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="prepare,mlperf,training,data,input,resnet"``` + +#### Run this script via Docker (beta) + +`cm docker script "prepare mlperf training data input resnet[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_mxnet.#` + - Environment variables: + - *CM_MXNET_VERSION*: `#` + - Workflow: + +
+ + + * Group "**implementation**" +
+ Click here to expand this section. + + * **`_nvidia`** (default) + - Environment variables: + - *CM_TMP_VARIATION*: `nvidia` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,mlperf,training,nvidia,code + * CM names: `--adr.['nvidia-training-code']...` + - CM script: [get-mlperf-training-nvidia-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-nvidia-code) + * get,git,repo,_repo.https://github.com/NVIDIA/DeepLearningExamples,_sha.81ee705868a11d6fe18c12d237abe4a08aab5fd6 + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + * `_reference` + - Environment variables: + - *CM_TMP_VARIATION*: `reference` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,mlperf,training,src + * CM names: `--adr.['mlperf-training-src']...` + - CM script: [get-mlperf-training-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-src) + * get,python3 + * CM names: `--adr.['python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_tensorflow + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_protobuf + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + +#### Default variations + +`_nvidia` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--data_dir=value` → `CM_DATA_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "data_dir":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet/_cm.json)*** + * get,dataset,imagenet,train + * CM names: `--adr.['imagenet-train']...` + - CM script: [get-dataset-imagenet-train](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-train) + * get,dataset,imagenet,val,original,_full + * CM names: `--adr.['imagenet-val']...` + - CM script: [get-dataset-imagenet-val](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-val) + * get,generic-sys-util,_rsync + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet/_cm.json)*** + * download,file,_wget,_url.https://raw.githubusercontent.com/tensorflow/models/master/research/slim/datasets/imagenet_2012_validation_synset_labels.txt + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + * download,file,_wget,_url.https://raw.githubusercontent.com/tensorflow/tpu/master/tools/datasets/imagenet_to_gcs.py + * Enable this dependency only if all ENV vars are set:
+`{'CM_TMP_VARIATION': ['reference']}` + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + 1. ***Run native script if exists*** + * [run-nvidia.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet/run-nvidia.sh) + * [run-reference.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet/run-reference.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet/_cm.json) + +___ +### Script output +`cmr "prepare mlperf training data input resnet [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_MLPERF_TRAINING_NVIDIA_RESNET_PREPROCESSED_PATH` +* `CM_MLPERF_TRAINING_RESNET_*` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_TRAINING_NVIDIA_RESNET_PREPROCESSED_PATH` +* `CM_MLPERF_TRAINING_RESNET_DATA_PATH` +* `CM_MLPERF_TRAINING_RESNET_TFRECORDS_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/preprocess-mlperf-inference-submission.md b/docs/MLPerf-benchmark-support/preprocess-mlperf-inference-submission.md new file mode 100644 index 000000000..20b71fd73 --- /dev/null +++ b/docs/MLPerf-benchmark-support/preprocess-mlperf-inference-submission.md @@ -0,0 +1,144 @@ +Automatically generated README for this automation recipe: **preprocess-mlperf-inference-submission** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info 
and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=preprocess-mlperf-inference-submission,c23068394a314266) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/preprocess-mlperf-inference-submission)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *run,mlc,mlcommons,mlperf,inference,submission,mlperf-inference,processor,preprocessor,preprocess* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run mlc mlcommons mlperf inference submission mlperf-inference processor preprocessor preprocess" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run,mlc,mlcommons,mlperf,inference,submission,mlperf-inference,processor,preprocessor,preprocess` + +`cm run script --tags=run,mlc,mlcommons,mlperf,inference,submission,mlperf-inference,processor,preprocessor,preprocess [--input_flags]` + +*or* + +`cmr "run mlc mlcommons mlperf inference submission mlperf-inference processor preprocessor preprocess"` + +`cmr "run mlc mlcommons mlperf inference submission mlperf-inference processor preprocessor preprocess " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'run,mlc,mlcommons,mlperf,inference,submission,mlperf-inference,processor,preprocessor,preprocess', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,mlc,mlcommons,mlperf,inference,submission,mlperf-inference,processor,preprocessor,preprocess"``` + +#### Run this script via Docker (beta) + +`cm docker script "run mlc mlcommons mlperf inference submission mlperf-inference processor preprocessor preprocess" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--submission_dir=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` +* `--submitter=value` → `CM_MLPERF_SUBMITTER=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "submission_dir":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/preprocess-mlperf-inference-submission/_cm.json)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src', 'submission-checker-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,mlperf,submission,dir + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_MLPERF_INFERENCE_SUBMISSION_DIR': ['on']}` + * CM names: `--adr.['get-mlperf-submission-dir']...` + - CM script: [get-mlperf-inference-submission-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-submission-dir) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/preprocess-mlperf-inference-submission/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/preprocess-mlperf-inference-submission/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/preprocess-mlperf-inference-submission/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/preprocess-mlperf-inference-submission/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/preprocess-mlperf-inference-submission/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/preprocess-mlperf-inference-submission/_cm.json) + +___ +### Script output +`cmr "run mlc mlcommons mlperf inference submission mlperf-inference processor preprocessor preprocess " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/process-mlperf-accuracy.md b/docs/MLPerf-benchmark-support/process-mlperf-accuracy.md new file mode 100644 index 000000000..47b3f0b5f --- /dev/null +++ b/docs/MLPerf-benchmark-support/process-mlperf-accuracy.md @@ -0,0 +1,334 @@ +Automatically generated README for this automation recipe: **process-mlperf-accuracy** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=process-mlperf-accuracy,6e809013816b42ea) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *run,mlperf,mlcommons,accuracy,mlc,process,process-accuracy* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run mlperf mlcommons accuracy mlc process process-accuracy" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run,mlperf,mlcommons,accuracy,mlc,process,process-accuracy` + +`cm run script --tags=run,mlperf,mlcommons,accuracy,mlc,process,process-accuracy[,variations] [--input_flags]` + +*or* + +`cmr "run mlperf mlcommons accuracy mlc process process-accuracy"` + +`cmr "run mlperf mlcommons accuracy mlc process process-accuracy [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'run,mlperf,mlcommons,accuracy,mlc,process,process-accuracy', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,mlperf,mlcommons,accuracy,mlc,process,process-accuracy"``` + +#### Run this script via Docker (beta) + +`cm docker script "run mlperf mlcommons accuracy mlc process process-accuracy[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_default-pycocotools,openimages` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_pycocotools + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,mlcommons,mlperf,inference,src,-_openimages-nvidia-pycocotools + * CM names: `--adr.['for-pycocotools', 'accuracy-check-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * `_nvidia-pycocotools,openimages` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_nvidia-pycocotools + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,mlcommons,mlperf,inference,src,_openimages-nvidia-pycocotools + * CM names: `--adr.['for-pycocotools', 'accuracy-check-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + +
+ + + * Group "**coco-evaluation-tool**" +
+ Click here to expand this section. + + * **`_default-pycocotools`** (default) + - Workflow: + * `_nvidia-pycocotools` + - Workflow: + +
+ + + * Group "**dataset**" +
+ Click here to expand this section. + + * `_cnndm` + - Environment variables: + - *CM_DATASET*: `cnndm` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,dataset,cnndm,_validation + - CM script: [get-dataset-cnndm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-cnndm) + * get,generic-python-lib,_package.rouge_score + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.nltk + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.evaluate + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.absl-py + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.rouge_score + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_coco2014` + - Environment variables: + - *CM_DATASET*: `coco2014` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,dataset,coco2014,original + * CM names: `--adr.['coco2014-dataset', 'coco2014-original']...` + - CM script: [get-dataset-coco2014](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-coco2014) + * **`_imagenet`** (default) + - Environment variables: + - *CM_DATASET*: `imagenet` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,dataset-aux,image-classification,imagenet-aux + - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_kits19` + - Environment variables: + - *CM_DATASET*: `kits19` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,dataset,preprocessed,medical-imaging,kits19 + - CM script: [get-preprocessed-dataset-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-kits19) + * `_librispeech` + - Environment variables: + - *CM_DATASET*: `librispeech` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,dataset,preprocessed,speech-recognition,librispeech + - CM script: [get-preprocessed-dataset-librispeech](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-librispeech) + * `_open-orca` + - Environment variables: + - *CM_DATASET*: `openorca` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,dataset,openorca,preprocessed + * CM names: `--adr.['openorca-dataset']...` + - CM script: [get-preprocessed-dataset-openorca](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openorca) + * get,ml-model,llama2 + * CM names: `--adr.['llama2-model']...` + - CM script: [get-ml-model-llama2](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-llama2) + * `_openimages` + - Environment variables: + - *CM_DATASET*: `openimages` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,dataset-aux,openimages,annotations + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_RUN_STYLE': ['valid']}` + - CM script: [get-dataset-openimages-annotations](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages-annotations) + * get,dataset,openimages,original + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_RUN_STYLE': ['valid']}` + * CM names: `--adr.['openimages-original']...` + - CM script: [get-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages) + * get,generic-python-lib,_package.kiwisolver + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_squad` + - Environment variables: + - *CM_DATASET*: `squad` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_boto3 + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.transformers + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,dataset,squad,language-processing + * Skip this dependency only if all ENV vars are set:
+`{'CM_DATASET_SQUAD_VAL_PATH': []}` + - CM script: [get-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad) + * get,dataset-aux,squad-vocab + * Skip this dependency only if all ENV vars are set:
+`{'CM_ML_MODEL_BERT_VOCAB_FILE_WITH_PATH': ['on']}` + - CM script: [get-dataset-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad-vocab) + * get,generic-python-lib,_torch + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_tokenization + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_terabyte` + - Environment variables: + - *CM_DATASET*: `squad` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_ujson + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_scikit-learn + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * `_float16` + - Environment variables: + - *CM_ACCURACY_DTYPE*: `float16` + - Workflow: + * **`_float32`** (default) + - Environment variables: + - *CM_ACCURACY_DTYPE*: `float32` + - Workflow: + * `_float64` + - Environment variables: + - *CM_ACCURACY_DTYPE*: `float64` + - Workflow: + * `_int16` + - Environment variables: + - *CM_ACCURACY_DTYPE*: `int16` + - Workflow: + * `_int32` + - Environment variables: + - *CM_ACCURACY_DTYPE*: `int32` + - Workflow: + * `_int64` + - Environment variables: + - *CM_ACCURACY_DTYPE*: `int64` + - Workflow: + * `_int8` + - Environment variables: + - *CM_ACCURACY_DTYPE*: `int8` + - Workflow: + +
+ + +#### Default variations + +`_default-pycocotools,_float32,_imagenet` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--result_dir=value` → `CM_MLPERF_ACCURACY_RESULTS_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "result_dir":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy/_cm.json)*** + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src', 'accuracy-check-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy/_cm.json) + +___ +### Script output +`cmr "run mlperf mlcommons accuracy mlc process process-accuracy [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/push-mlperf-inference-results-to-github.md b/docs/MLPerf-benchmark-support/push-mlperf-inference-results-to-github.md new file mode 100644 index 000000000..ebca5bece --- /dev/null +++ b/docs/MLPerf-benchmark-support/push-mlperf-inference-results-to-github.md @@ -0,0 +1,150 @@ +Automatically generated README for this automation recipe: **push-mlperf-inference-results-to-github** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=push-mlperf-inference-results-to-github,36c2ffd5df5d453a) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-mlperf-inference-results-to-github)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *push,mlperf,mlperf-inference-results,publish-results,inference,submission,github* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "push mlperf mlperf-inference-results publish-results inference submission github" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=push,mlperf,mlperf-inference-results,publish-results,inference,submission,github` + +`cm run script --tags=push,mlperf,mlperf-inference-results,publish-results,inference,submission,github [--input_flags]` + +*or* + +`cmr "push mlperf mlperf-inference-results publish-results inference submission github"` + +`cmr "push mlperf mlperf-inference-results publish-results inference submission github " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'push,mlperf,mlperf-inference-results,publish-results,inference,submission,github', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="push,mlperf,mlperf-inference-results,publish-results,inference,submission,github"``` + +#### Run this script via Docker (beta) + +`cm docker script "push mlperf mlperf-inference-results publish-results inference submission github" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--branch=value` → `CM_GIT_BRANCH=value` +* `--commit_message=value` → `CM_MLPERF_RESULTS_REPO_COMMIT_MESSAGE=value` +* `--repo_branch=value` → `CM_GIT_BRANCH=value` +* `--repo_url=value` → `CM_MLPERF_RESULTS_GIT_REPO_URL=value` +* `--submission_dir=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "branch":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_RESULTS_GIT_REPO_URL: `https://github.com/ctuning/mlperf_inference_submissions_v4.0` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-mlperf-inference-results-to-github/_cm.json)*** + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-sys-util,_rsync + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,mlperf,submission,dir + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_INFERENCE_SUBMISSION_DIR': ['on']}` + * CM names: `--adr.['get-mlperf-submission-dir']...` + - CM script: [get-mlperf-inference-submission-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-submission-dir) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-mlperf-inference-results-to-github/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-mlperf-inference-results-to-github/_cm.json)*** + * get,git,repo + * CM names: `--adr.['get-git-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-mlperf-inference-results-to-github/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-mlperf-inference-results-to-github/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-mlperf-inference-results-to-github/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-mlperf-inference-results-to-github/_cm.json) + +___ +### Script output +`cmr "push mlperf mlperf-inference-results publish-results inference submission github " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/run-all-mlperf-models.md b/docs/MLPerf-benchmark-support/run-all-mlperf-models.md new file mode 100644 index 000000000..01f5427b1 --- /dev/null +++ b/docs/MLPerf-benchmark-support/run-all-mlperf-models.md @@ -0,0 +1,237 @@ +
+Click here to see the table of contents. + +* [About](#about) +* [Summary](#summary) +* [Reuse this script in your project](#reuse-this-script-in-your-project) + * [ Install CM automation language](#install-cm-automation-language) + * [ Check CM script flags](#check-cm-script-flags) + * [ Run this script from command line](#run-this-script-from-command-line) + * [ Run this script from Python](#run-this-script-from-python) + * [ Run this script via GUI](#run-this-script-via-gui) + * [ Run this script via Docker (beta)](#run-this-script-via-docker-(beta)) +* [Customization](#customization) + * [ Variations](#variations) + * [ Default environment](#default-environment) +* [Script workflow, dependencies and native scripts](#script-workflow-dependencies-and-native-scripts) +* [Script output](#script-output) +* [New environment keys (filter)](#new-environment-keys-(filter)) +* [New environment keys auto-detected from customize](#new-environment-keys-auto-detected-from-customize) +* [Maintainers](#maintainers) + +
+ +*Note that this README is automatically generated - don't edit!* + +### About + +#### Summary + +* Category: *MLPerf benchmark support.* +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* CM "database" tags to find this script: *run,natively,all,mlperf-models* +* Output cached? *False* +___ +### Reuse this script in your project + +#### Install CM automation language + +* [Installation guide](https://github.com/mlcommons/ck/blob/master/docs/installation.md) +* [CM intro](https://doi.org/10.5281/zenodo.8105339) + +#### Pull CM repository with this automation + +```cm pull repo mlcommons@cm4mlops --checkout=dev``` + + +#### Run this script from command line + +1. `cm run script --tags=run,natively,all,mlperf-models[,variations] ` + +2. `cmr "run natively all mlperf-models[ variations]" ` + +* `variations` can be seen [here](#variations) + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'run,natively,all,mlperf-models', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,natively,all,mlperf-models"``` + +Use this [online GUI](https://cKnowledge.org/cm-gui/?tags=run,natively,all,mlperf-models) to generate CM CMD. + +#### Run this script via Docker (beta) + +`cm docker script "run natively all mlperf-models[ variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_phoenix,reference` + - Workflow: + +
+ + + * Group "**implementation**" +
+ Click here to expand this section. + + * `_deepsparse` + - Environment variables: + - *DIVISION*: `open` + - *IMPLEMENTATION*: `deepsparse` + - Workflow: + * `_intel` + - Environment variables: + - *IMPLEMENTATION*: `intel` + - Workflow: + * `_mil` + - Environment variables: + - *IMPLEMENTATION*: `mil` + - Workflow: + * `_nvidia` + - Environment variables: + - *IMPLEMENTATION*: `nvidia` + - Workflow: + * `_qualcomm` + - Environment variables: + - *IMPLEMENTATION*: `qualcomm` + - Workflow: + * `_reference` + - Environment variables: + - *IMPLEMENTATION*: `reference` + - Workflow: + * `_tflite-cpp` + - Environment variables: + - *IMPLEMENTATION*: `tflite_cpp` + - Workflow: + +
+ + + * Group "**power**" +
+ Click here to expand this section. + + * **`_performance-only`** (default) + - Workflow: + * `_power` + - Environment variables: + - *POWER*: `True` + - Workflow: + +
+ + + * Group "**sut**" +
+ Click here to expand this section. + + * `_macbookpro-m1` + - Environment variables: + - *CATEGORY*: `edge` + - *DIVISION*: `closed` + - Workflow: + * `_orin.32g` + - Environment variables: + - *CATEGORY*: `edge` + - *DIVISION*: `closed` + - Workflow: + * `_phoenix` + - Environment variables: + - *CATEGORY*: `edge,datacenter` + - *DIVISION*: `closed` + - Workflow: + * `_sapphire-rapids.24c` + - Environment variables: + - *CATEGORY*: `edge,datacenter` + - *DIVISION*: `closed` + - Workflow: + +
+ + +#### Default variations + +`_performance-only` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Script workflow, dependencies and native scripts + +
+Click here to expand this section. + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/_cm.yaml) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/_cm.yaml) + 1. ***Run native script if exists*** + * [run-bert-macos.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-bert-macos.sh) + * [run-bert.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-bert.sh) + * [run-cpp-implementation.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-cpp-implementation.sh) + * [run-mobilenet-models.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-mobilenet-models.sh) + * [run-nvidia-4090.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-nvidia-4090.sh) + * [run-nvidia-a100.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-nvidia-a100.sh) + * [run-nvidia-t4.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-nvidia-t4.sh) + * [run-pruned-bert.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-pruned-bert.sh) + * [run-reference-models.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-reference-models.sh) + * [run-resnet50-macos.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-resnet50-macos.sh) + * [run-resnet50.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-resnet50.sh) + 1. 
Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/_cm.yaml) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/_cm.yaml) +
+ +___ +### Script output +`cmr "run natively all mlperf-models[,variations]" -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize + +___ +### Maintainers + +* [Open MLCommons taskforce on automation and reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/run-mlperf-inference-mobilenet-models.md b/docs/MLPerf-benchmark-support/run-mlperf-inference-mobilenet-models.md new file mode 100644 index 000000000..a72c5e798 --- /dev/null +++ b/docs/MLPerf-benchmark-support/run-mlperf-inference-mobilenet-models.md @@ -0,0 +1,383 @@ +Automatically generated README for this automation recipe: **run-mlperf-inference-mobilenet-models** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-mlperf-inference-mobilenet-models,f21cc993a8b14a58) ]* + +--- + +## Set up + +We need to get imagenet full dataset to make image-classification submissions for MLPerf inference. Since this dataset is not publicly available via a URL please follow the instructions given [here](https://github.com/mlcommons/ck/blob/master/cm-mlops/script/get-dataset-imagenet-val/README-extra.md) to download the dataset and register in CM. + +
+Click here to set up docker (Optional). + +### Docker Setup + +CM commands are expected to run natively but if you prefer not to modify the host system, you can do the below command to set up a docker container. + +``` +cm docker script --tags=run,mobilenet-models,_tflite,_accuracy-only \ +--adr.compiler.tags=gcc \ +--docker_cm_repo=mlcommons@cm4mlops \ +--imagenet_path=$HOME/imagenet-2012-val \ +--results_dir=$HOME/mobilenet_results \ +--submission_dir=$HOME/inference_submission_3.1 \ +--docker_skip_run_cmd +``` + +This command will build a docker container and give you an interactive shell from which you can execute the below CM run commands. +* `results_dir`, `submission_dir` and `imagenet_path` are mounted from the host system. +* `results_dir` and `submission_dir` are expected to be empty directories to be populated by the docker +* `imagenet_path` should point to the imagenet folder containing the 50000 validation images. + +
+ +## Run Commands + +Since the runs can take many hours, in case you are running remotely you can install screen as follows. You may omit "screen" from all commands if you are running on a host system. +``` +cmr "get generic-sys-util _screen" +``` +### Default tflite + + +#### Do a full accuracy run for all the models (can take almost a day) + +``` +screen cmr "run mobilenet-models _tflite _accuracy-only" \ +--adr.compiler.tags=gcc \ +--results_dir=$HOME/mobilenet_results +``` + +#### Do a full performance run for all the models (can take almost a day) +``` +screen cmr "run mobilenet-models _tflite _performance-only" \ +--adr.compiler.tags=gcc \ +--results_dir=$HOME/mobilenet_results +``` + +#### Generate README files for all the runs +``` +cmr "run mobilenet-models _tflite _populate-readme" \ +--adr.compiler.tags=gcc \ +--results_dir=$HOME/mobilenet_results +``` + +#### Generate actual submission tree + +We should use the master branch of MLCommons inference repo for the submission checker. You can use `--hw_note_extra` option to add your name to the notes. +``` +cmr "generate inference submission" \ +--results_dir=$HOME/mobilenet_results/valid_results \ +--submission_dir=$HOME/mobilenet_submission_tree \ +--clean \ +--infer_scenario_results=yes \ +--adr.compiler.tags=gcc --adr.inference-src.version=master \ +--run-checker \ +--submitter=cTuning \ +--hw_notes_extra="Result taken by NAME" +``` +* Use `--hw_name="My system name"` to give a meaningful system name. Examples can be seen [here](https://github.com/mlcommons/inference_results_v3.0/tree/main/open/cTuning/systems) + +#### Push the results to GitHub repo + +First, create a fork of [this repo](https://github.com/ctuning/mlperf_inference_submissions_v3.1/). Then run the following command after replacing `--repo_url` with your fork URL. 
+``` +cmr "push github mlperf inference submission" \ +--submission_dir=$HOME/mobilenet_submission_tree \ +--repo_url=https://github.com/ctuning/mlperf_inference_submissions_v3.1/ \ +--commit_message="Mobilenet results added" +``` + +Create a PR to [cTuning repo](https://github.com/ctuning/mlperf_inference_submissions_v3.1/) + +### Using ARMNN with NEON + +Follow the same procedure as above but for the first three experiment runs add `_armnn,_neon` to the tags. For example +``` +cmr "run mobilenet-models _tflite _armnn _neon _accuracy-only" \ +--adr.compiler.tags=gcc \ +--results_dir=$HOME/mobilenet_results +``` + +`results_dir` and `submission_dir` can be the same as before as results will be going to different subfolders. + +### Using ARMNN with OpenCL +Follow the same procedure as above but for the first three experiment runs add `_armnn,_opencl` to the tags. For example +``` +cmr "run mobilenet-models _tflite _armnn _opencl _accuracy-only" \ +--adr.compiler.tags=gcc \ +--results_dir=$HOME/mobilenet_results +``` + +`results_dir` and `submission_dir` can be the same as before as results will be going to different subfolders. + + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-mobilenet-models)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *run,mobilenet,models,image-classification,mobilenet-models,mlperf,inference* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run mobilenet models image-classification mobilenet-models mlperf inference" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run,mobilenet,models,image-classification,mobilenet-models,mlperf,inference` + +`cm run script --tags=run,mobilenet,models,image-classification,mobilenet-models,mlperf,inference[,variations] [--input_flags]` + +*or* + +`cmr "run mobilenet models image-classification mobilenet-models mlperf inference"` + +`cmr "run mobilenet models image-classification mobilenet-models mlperf inference [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'run,mobilenet,models,image-classification,mobilenet-models,mlperf,inference', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,mobilenet,models,image-classification,mobilenet-models,mlperf,inference"``` + +#### Run this script via Docker (beta) + +`cm docker script "run mobilenet models image-classification mobilenet-models mlperf inference[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_armnn` + - Environment variables: + - *CM_MLPERF_USE_ARMNN_LIBRARY*: `yes` + - Workflow: + * `_neon` + - Aliases: `_use-neon` + - Environment variables: + - *CM_MLPERF_USE_NEON*: `yes` + - Workflow: + * `_only-fp32` + - Environment variables: + - *CM_MLPERF_RUN_INT8*: `no` + - Workflow: + * `_only-int8` + - Environment variables: + - *CM_MLPERF_RUN_FP32*: `no` + - Workflow: + * `_opencl` + - Environment variables: + - *CM_MLPERF_USE_OPENCL*: `yes` + - Workflow: + * `_tflite,armnn` + - Environment variables: + - *CM_MLPERF_TFLITE_ARMNN*: `yes` + - Workflow: + * `_tflite,armnn,neon` + - Environment variables: + - *CM_MLPERF_TFLITE_ARMNN_NEON*: `yes` + - Workflow: + * `_tflite,armnn,opencl` + - Environment variables: + - *CM_MLPERF_TFLITE_ARMNN_OPENCL*: `yes` + - Workflow: + +
+ + + * Group "**base-framework**" +
+ Click here to expand this section. + + * **`_tflite`** (default) + - Workflow: + +
+ + + * Group "**model-selection**" +
+ Click here to expand this section. + + * **`_all-models`** (default) + - Environment variables: + - *CM_MLPERF_RUN_MOBILENETS*: `yes` + - *CM_MLPERF_RUN_EFFICIENTNETS*: `yes` + - Workflow: + * `_efficientnet` + - Environment variables: + - *CM_MLPERF_RUN_EFFICIENTNETS*: `yes` + - Workflow: + * `_mobilenet` + - Environment variables: + - *CM_MLPERF_RUN_MOBILENETS*: `yes` + - Workflow: + +
+ + + * Group "**optimization**" +
+ Click here to expand this section. + + * **`_tflite-default`** (default) + - Environment variables: + - *CM_MLPERF_TFLITE_DEFAULT_MODE*: `yes` + - Workflow: + +
+ + + * Group "**run-mode**" +
+ Click here to expand this section. + + * `_accuracy-only` + - Environment variables: + - *CM_MLPERF_FIND_PERFORMANCE_MODE*: `no` + - *CM_MLPERF_ACCURACY_MODE*: `yes` + - *CM_MLPERF_SUBMISSION_MODE*: `no` + - Workflow: + * `_find-performance` + - Environment variables: + - *CM_MLPERF_FIND_PERFORMANCE_MODE*: `yes` + - *CM_MLPERF_SUBMISSION_MODE*: `no` + - Workflow: + * `_performance-only` + - Environment variables: + - *CM_MLPERF_FIND_PERFORMANCE_MODE*: `no` + - *CM_MLPERF_PERFORMANCE_MODE*: `yes` + - *CM_MLPERF_SUBMISSION_MODE*: `no` + - Workflow: + * `_populate-readme` + - Environment variables: + - *CM_MLPERF_FIND_PERFORMANCE_MODE*: `no` + - *CM_MLPERF_POPULATE_README*: `yes` + - Workflow: + * `_submission` + - Environment variables: + - *CM_MLPERF_FIND_PERFORMANCE_MODE*: `no` + - *CM_MLPERF_SUBMISSION_MODE*: `yes` + - Workflow: + +
+ + +#### Default variations + +`_all-models,_tflite,_tflite-default` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--find-performance=value` → `CM_MLPERF_FIND_PERFORMANCE_MODE=value` +* `--imagenet_path=value` → `IMAGENET_PATH=value` +* `--no-rerun=value` → `CM_MLPERF_NO_RERUN=value` +* `--power=value` → `CM_MLPERF_POWER=value` +* `--results_dir=value` → `CM_MLPERF_INFERENCE_RESULTS_DIR=value` +* `--submission=value` → `CM_MLPERF_SUBMISSION_MODE=value` +* `--submission_dir=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "find-performance":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_RUN_MOBILENETS: `no` +* CM_MLPERF_RUN_EFFICIENTNETS: `no` +* CM_MLPERF_NO_RERUN: `no` +* CM_MLPERF_RUN_FP32: `yes` +* CM_MLPERF_RUN_INT8: `yes` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-mobilenet-models/_cm.json)*** + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-mobilenet-models/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-mobilenet-models/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-mobilenet-models/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-mobilenet-models/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-mobilenet-models/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-mobilenet-models/_cm.json) + +___ +### Script output +`cmr "run mobilenet models image-classification mobilenet-models mlperf inference [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/run-mlperf-inference-submission-checker.md b/docs/MLPerf-benchmark-support/run-mlperf-inference-submission-checker.md new file mode 100644 index 000000000..a530b154e --- /dev/null +++ b/docs/MLPerf-benchmark-support/run-mlperf-inference-submission-checker.md @@ -0,0 +1,199 @@ +Automatically generated README for this automation recipe: **run-mlperf-inference-submission-checker** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-mlperf-inference-submission-checker,15d03ec2c1af4297) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *run,mlc,mlcommons,mlperf,inference,mlperf-inference,submission,checker,submission-checker,mlc-submission-checker* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run mlc mlcommons mlperf inference mlperf-inference submission checker submission-checker mlc-submission-checker" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run,mlc,mlcommons,mlperf,inference,mlperf-inference,submission,checker,submission-checker,mlc-submission-checker` + +`cm run script --tags=run,mlc,mlcommons,mlperf,inference,mlperf-inference,submission,checker,submission-checker,mlc-submission-checker[,variations] [--input_flags]` + +*or* + +`cmr "run mlc mlcommons mlperf inference mlperf-inference submission checker submission-checker mlc-submission-checker"` + +`cmr "run mlc mlcommons mlperf inference mlperf-inference submission checker submission-checker mlc-submission-checker [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'run,mlc,mlcommons,mlperf,inference,mlperf-inference,submission,checker,submission-checker,mlc-submission-checker', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,mlc,mlcommons,mlperf,inference,mlperf-inference,submission,checker,submission-checker,mlc-submission-checker"``` + +#### Run this script via Docker (beta) + +`cm docker script "run mlc mlcommons mlperf inference mlperf-inference submission checker submission-checker mlc-submission-checker[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_short-run` + - Environment variables: + - *CM_MLPERF_SHORT_RUN*: `yes` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--extra_args=value` → `CM_MLPERF_SUBMISSION_CHECKER_EXTRA_ARGS=value` +* `--extra_model_benchmark_map=value` → `CM_MLPERF_EXTRA_MODEL_MAPPING=value` +* `--input=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` +* `--power=value` → `CM_MLPERF_POWER=value` +* `--push_to_github=value` → `CM_MLPERF_RESULT_PUSH_TO_GITHUB=value` +* `--skip_compliance=value` → `CM_MLPERF_SKIP_COMPLIANCE=value` +* `--skip_power_check=value` → `CM_MLPERF_SKIP_POWER_CHECK=value` +* `--src_version=value` → `CM_MLPERF_SUBMISSION_CHECKER_VERSION=value` +* `--submission_dir=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` +* `--submitter=value` → `CM_MLPERF_SUBMITTER=value` +* `--tar=value` → `CM_TAR_SUBMISSION_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "extra_args":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_SHORT_RUN: `no` + +
+ +#### Versions +Default version: `master` + +* `master` +* `r3.0` +* `r3.1` +* `r4.0` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker/_cm.json)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src', 'submission-checker-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,generic-python-lib,_xlsxwriter + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.pyarrow + * CM names: `--adr.['pyarrow']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pandas + * CM names: `--adr.['pandas']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,mlperf,submission,dir + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_MLPERF_INFERENCE_SUBMISSION_DIR': ['on']}` + * CM names: `--adr.['get-mlperf-submission-dir']...` + - CM script: [get-mlperf-inference-submission-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-submission-dir) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker/_cm.json)*** + * publish-results,dashboard + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_DASHBOARD': ['on']}` + - CM script: [publish-results-to-dashboard](https://github.com/mlcommons/cm4mlops/tree/master/script/publish-results-to-dashboard) + * publish-results,github + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_RESULT_PUSH_TO_GITHUB': ['on']}` + * CM names: `--adr.['push-to-github']...` + - CM script: [push-mlperf-inference-results-to-github](https://github.com/mlcommons/cm4mlops/tree/master/script/push-mlperf-inference-results-to-github) + * run,tar + * Enable this dependency only if all ENV vars are set:
+`{'CM_TAR_SUBMISSION_DIR': ['yes']}` + - CM script: [tar-my-folder](https://github.com/mlcommons/cm4mlops/tree/master/script/tar-my-folder) + +___ +### Script output +`cmr "run mlc mlcommons mlperf inference mlperf-inference submission checker submission-checker mlc-submission-checker [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/run-mlperf-power-client.md b/docs/MLPerf-benchmark-support/run-mlperf-power-client.md new file mode 100644 index 000000000..d0892f842 --- /dev/null +++ b/docs/MLPerf-benchmark-support/run-mlperf-power-client.md @@ -0,0 +1,154 @@ +Automatically generated README for this automation recipe: **run-mlperf-power-client** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-mlperf-power-client,bf6a6d0cc97b48ae) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-client)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *run,mlc,mlcommons,mlperf,power,client,power-client* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run mlc mlcommons mlperf power client power-client" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run,mlc,mlcommons,mlperf,power,client,power-client` + +`cm run script --tags=run,mlc,mlcommons,mlperf,power,client,power-client [--input_flags]` + +*or* + +`cmr "run mlc mlcommons mlperf power client power-client"` + +`cmr "run mlc mlcommons mlperf power client power-client " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'run,mlc,mlcommons,mlperf,power,client,power-client', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,mlc,mlcommons,mlperf,power,client,power-client"``` + +#### Run this script via Docker (beta) + +`cm docker script "run mlc mlcommons mlperf power client power-client" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--loadgen_logs_dir=value` → `CM_MLPERF_LOADGEN_LOGS_DIR=value` +* `--log_dir=value` → `CM_MLPERF_POWER_LOG_DIR=value` +* `--max_amps=value` → `CM_MLPERF_POWER_MAX_AMPS=value` +* `--max_volts=value` → `CM_MLPERF_POWER_MAX_VOLTS=value` +* `--ntp_server=value` → `CM_MLPERF_POWER_NTP_SERVER=value` +* `--port=value` → `CM_MLPERF_POWER_SERVER_PORT=value` +* `--power_server=value` → `CM_MLPERF_POWER_SERVER_ADDRESS=value` +* `--run_cmd=value` → `CM_MLPERF_RUN_CMD=value` +* `--server=value` → `CM_MLPERF_POWER_SERVER_ADDRESS=value` +* `--server_port=value` → `CM_MLPERF_POWER_SERVER_PORT=value` +* `--timestamp=value` → `CM_MLPERF_POWER_TIMESTAMP=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "loadgen_logs_dir":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_POWER_LOG_DIR: `logs` +* CM_MLPERF_RUN_CMD: `` +* CM_MLPERF_POWER_SERVER_ADDRESS: `localhost` +* CM_MLPERF_POWER_NTP_SERVER: `time.google.com` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-client/_cm.json)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlperf,power,src + * CM names: `--adr.['power-src']...` + - CM script: [get-mlperf-power-dev](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-power-dev) + * get,generic-sys-util,_ntpdate + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-client/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-client/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-client/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-client/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-client/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-client/_cm.json) + +___ +### Script output +`cmr "run mlc mlcommons mlperf power client power-client " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/run-mlperf-power-server.md b/docs/MLPerf-benchmark-support/run-mlperf-power-server.md new file mode 100644 index 000000000..99e37b374 --- /dev/null +++ b/docs/MLPerf-benchmark-support/run-mlperf-power-server.md @@ -0,0 +1,165 @@ +Automatically generated README for this automation recipe: **run-mlperf-power-server** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-mlperf-power-server,5bc68aaf389a40bd) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *run,mlc,mlcommons,mlperf,power,server,power-server* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run mlc mlcommons mlperf power server power-server" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run,mlc,mlcommons,mlperf,power,server,power-server` + +`cm run script --tags=run,mlc,mlcommons,mlperf,power,server,power-server [--input_flags]` + +*or* + +`cmr "run mlc mlcommons mlperf power server power-server"` + +`cmr "run mlc mlcommons mlperf power server power-server " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'run,mlc,mlcommons,mlperf,power,server,power-server', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,mlc,mlcommons,mlperf,power,server,power-server"``` + +#### Run this script via Docker (beta) + +`cm docker script "run mlc mlcommons mlperf power server power-server" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--device_port=value` → `CM_MLPERF_POWER_DEVICE_PORT=value` +* `--device_type=value` → `CM_MLPERF_POWER_DEVICE_TYPE=value` +* `--interface_flag=value` → `CM_MLPERF_POWER_INTERFACE_FLAG=value` +* `--ntp_server=value` → `CM_MLPERF_POWER_NTP_SERVER=value` +* `--screen=value` → `CM_MLPERF_POWER_SERVER_USE_SCREEN=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "device_port":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_POWER_NTP_SERVER: `time.google.com` +* CM_MLPERF_POWER_INTERFACE_FLAG: `` +* CM_MLPERF_POWER_DEVICE_TYPE: `49` +* CM_MLPERF_POWER_SERVER_ADDRESS: `0.0.0.0` +* CM_MLPERF_POWER_SERVER_PORT: `4950` +* CM_MLPERF_POWER_DEVICE_PORT: `/dev/usbtmc0` +* CM_MLPERF_POWER_SERVER_USE_SCREEN: `no` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server/_cm.json)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,mlperf,power,src + * CM names: `--adr.['power-src']...` + - CM script: [get-mlperf-power-dev](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-power-dev) + * get,mlperf,power,daemon + * CM names: `--adr.['power-damenon']...` + - CM script: [get-spec-ptd](https://github.com/mlcommons/cm4mlops/tree/master/script/get-spec-ptd) + * get,generic,sys-util,_screen + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_HOST_OS_TYPE': 'windows'}` + * CM names: `--adr.['screen']...` + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic-python-lib,_package.pypiwin32 + * Enable this dependency only if all ENV vars are set:
+`{'CM_HOST_OS_TYPE': 'windows'}` + * CM names: `--adr.['win32']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server/_cm.json) + +___ +### Script output +`cmr "run mlc mlcommons mlperf power server power-server " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/run-mlperf-training-submission-checker.md b/docs/MLPerf-benchmark-support/run-mlperf-training-submission-checker.md new file mode 100644 index 000000000..10f093c5f --- /dev/null +++ b/docs/MLPerf-benchmark-support/run-mlperf-training-submission-checker.md @@ -0,0 +1,181 @@ +Automatically generated README for this automation recipe: **run-mlperf-training-submission-checker** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-mlperf-training-submission-checker,cb5cb60ac9a74d09) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-training-submission-checker)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *run,mlc,mlcommons,mlperf,training,train,mlperf-training,submission,checker,submission-checker,mlc-submission-checker* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run mlc mlcommons mlperf training train mlperf-training submission checker submission-checker mlc-submission-checker" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run,mlc,mlcommons,mlperf,training,train,mlperf-training,submission,checker,submission-checker,mlc-submission-checker` + +`cm run script --tags=run,mlc,mlcommons,mlperf,training,train,mlperf-training,submission,checker,submission-checker,mlc-submission-checker[,variations] [--input_flags]` + +*or* + +`cmr "run mlc mlcommons mlperf training train mlperf-training submission checker submission-checker mlc-submission-checker"` + +`cmr "run mlc mlcommons mlperf training train mlperf-training submission checker submission-checker mlc-submission-checker [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'run,mlc,mlcommons,mlperf,training,train,mlperf-training,submission,checker,submission-checker,mlc-submission-checker', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,mlc,mlcommons,mlperf,training,train,mlperf-training,submission,checker,submission-checker,mlc-submission-checker"``` + +#### Run this script via Docker (beta) + +`cm docker script "run mlc mlcommons mlperf training train mlperf-training submission checker submission-checker mlc-submission-checker[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_short-run` + - Environment variables: + - *CM_MLPERF_SHORT_RUN*: `yes` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--extra_args=value` → `CM_MLPERF_SUBMISSION_CHECKER_EXTRA_ARGS=value` +* `--input=value` → `CM_MLPERF_SUBMISSION_DIR=value` +* `--power=value` → `CM_MLPERF_POWER=value` +* `--push_to_github=value` → `CM_MLPERF_RESULT_PUSH_TO_GITHUB=value` +* `--skip_compliance=value` → `CM_MLPERF_SKIP_COMPLIANCE=value` +* `--skip_power_check=value` → `CM_MLPERF_SKIP_POWER_CHECK=value` +* `--src_version=value` → `CM_MLPERF_SUBMISSION_CHECKER_VERSION=value` +* `--submission_dir=value` → `CM_MLPERF_SUBMISSION_DIR=value` +* `--submitter=value` → `CM_MLPERF_SUBMITTER=value` +* `--tar=value` → `CM_TAR_SUBMISSION_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "extra_args":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_SHORT_RUN: `no` + +
+ +#### Versions +Default version: `master` + +* `master` +* `r3.0` +* `r3.1` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-training-submission-checker/_cm.json)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src', 'submission-checker-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * install,mlperf,logging,from.src + - CM script: [install-mlperf-logging-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-mlperf-logging-from-src) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-training-submission-checker/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-training-submission-checker/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-training-submission-checker/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-training-submission-checker/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-training-submission-checker/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-training-submission-checker/_cm.json)*** + * publish-results,github + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_RESULT_PUSH_TO_GITHUB': ['on']}` + * CM names: `--adr.['push-to-github']...` + - CM script: [push-mlperf-inference-results-to-github](https://github.com/mlcommons/cm4mlops/tree/master/script/push-mlperf-inference-results-to-github) + * run,tar + * Enable this dependency only if all ENV vars are set:
+`{'CM_TAR_SUBMISSION_DIR': ['yes']}` + - CM script: [tar-my-folder](https://github.com/mlcommons/cm4mlops/tree/master/script/tar-my-folder) + +___ +### Script output +`cmr "run mlc mlcommons mlperf training train mlperf-training submission checker submission-checker mlc-submission-checker [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/truncate-mlperf-inference-accuracy-log.md b/docs/MLPerf-benchmark-support/truncate-mlperf-inference-accuracy-log.md new file mode 100644 index 000000000..bd14b1c25 --- /dev/null +++ b/docs/MLPerf-benchmark-support/truncate-mlperf-inference-accuracy-log.md @@ -0,0 +1,145 @@ +Automatically generated README for this automation recipe: **truncate-mlperf-inference-accuracy-log** + +Category: **MLPerf benchmark support** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=truncate-mlperf-inference-accuracy-log,9d5ec20434084d14) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/truncate-mlperf-inference-accuracy-log)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *run,mlc,mlcommons,mlperf,inference,mlperf-inference,truncation,truncator,truncate,accuracy,accuracy-log,accuracy-log-trancation,accuracy-log-truncator,mlc-accuracy-log-trancation,mlc-accuracy-log-truncator* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run mlc mlcommons mlperf inference mlperf-inference truncation truncator truncate accuracy accuracy-log accuracy-log-trancation accuracy-log-truncator mlc-accuracy-log-trancation mlc-accuracy-log-truncator" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run,mlc,mlcommons,mlperf,inference,mlperf-inference,truncation,truncator,truncate,accuracy,accuracy-log,accuracy-log-trancation,accuracy-log-truncator,mlc-accuracy-log-trancation,mlc-accuracy-log-truncator` + +`cm run script --tags=run,mlc,mlcommons,mlperf,inference,mlperf-inference,truncation,truncator,truncate,accuracy,accuracy-log,accuracy-log-trancation,accuracy-log-truncator,mlc-accuracy-log-trancation,mlc-accuracy-log-truncator [--input_flags]` + +*or* + +`cmr "run mlc mlcommons mlperf inference mlperf-inference truncation truncator truncate accuracy accuracy-log accuracy-log-trancation accuracy-log-truncator mlc-accuracy-log-trancation mlc-accuracy-log-truncator"` + +`cmr "run mlc mlcommons mlperf inference mlperf-inference truncation truncator truncate accuracy accuracy-log accuracy-log-trancation accuracy-log-truncator mlc-accuracy-log-trancation mlc-accuracy-log-truncator " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'run,mlc,mlcommons,mlperf,inference,mlperf-inference,truncation,truncator,truncate,accuracy,accuracy-log,accuracy-log-trancation,accuracy-log-truncator,mlc-accuracy-log-trancation,mlc-accuracy-log-truncator', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,mlc,mlcommons,mlperf,inference,mlperf-inference,truncation,truncator,truncate,accuracy,accuracy-log,accuracy-log-trancation,accuracy-log-truncator,mlc-accuracy-log-trancation,mlc-accuracy-log-truncator"``` + +#### Run this script via Docker (beta) + +`cm docker script "run mlc mlcommons mlperf inference mlperf-inference truncation truncator truncate accuracy accuracy-log accuracy-log-trancation accuracy-log-truncator mlc-accuracy-log-trancation mlc-accuracy-log-truncator" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--input=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` +* `--submission_dir=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` +* `--submitter=value` → `CM_MLPERF_SUBMITTER=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "input":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/truncate-mlperf-inference-accuracy-log/_cm.json)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,mlperf,submission,dir + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_MLPERF_INFERENCE_SUBMISSION_DIR': ['on']}` + * CM names: `--adr.['get-mlperf-submission-dir']...` + - CM script: [get-mlperf-inference-submission-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-submission-dir) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/truncate-mlperf-inference-accuracy-log/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/truncate-mlperf-inference-accuracy-log/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/truncate-mlperf-inference-accuracy-log/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/truncate-mlperf-inference-accuracy-log/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/truncate-mlperf-inference-accuracy-log/_cm.json) + +___ +### Script output +`cmr "run mlc mlcommons mlperf inference mlperf-inference truncation truncator truncate accuracy accuracy-log accuracy-log-trancation accuracy-log-truncator mlc-accuracy-log-trancation mlc-accuracy-log-truncator " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Modular-AI-ML-application-pipeline/app-image-classification-onnx-py.md b/docs/Modular-AI-ML-application-pipeline/app-image-classification-onnx-py.md new file mode 100644 index 000000000..eb4e16f3d --- /dev/null +++ b/docs/Modular-AI-ML-application-pipeline/app-image-classification-onnx-py.md @@ -0,0 +1,213 @@ +Automatically generated README for this automation recipe: **app-image-classification-onnx-py** + +Category: **Modular AI/ML application pipeline** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task 
Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-image-classification-onnx-py,3d5e908e472b417e) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *modular,python,app,image-classification,onnx* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "modular python app image-classification onnx" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=modular,python,app,image-classification,onnx` + +`cm run script --tags=modular,python,app,image-classification,onnx[,variations] [--input_flags]` + +*or* + +`cmr "modular python app image-classification onnx"` + +`cmr "modular python app image-classification onnx [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* 
+ + +#### Input Flags + +* --**input**=Path to JPEG image to classify +* --**output**=Output directory (optional) +* --**j**=Print JSON output + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "input":...} +``` +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'modular,python,app,image-classification,onnx' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="modular,python,app,image-classification,onnx"``` + +#### Run this script via Docker (beta) + +`cm docker script "modular python app image-classification onnx[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**target**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *USE_CPU*: `True` + - Workflow: + * `_cuda` + - Environment variables: + - *USE_CUDA*: `True` + - Workflow: + +
+ + +#### Default variations + +`_cpu` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--input=value` → `CM_IMAGE=value` +* `--output=value` → `CM_APP_IMAGE_CLASSIFICATION_ONNX_PY_OUTPUT=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "input":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_BATCH_COUNT: `1` +* CM_BATCH_SIZE: `1` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,cuda + * Enable this dependency only if all ENV vars are set:
+`{'USE_CUDA': [True]}` + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,cudnn + * Enable this dependency only if all ENV vars are set:
+`{'USE_CUDA': [True]}` + * CM names: `--adr.['cudnn']...` + - CM script: [get-cudnn](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cudnn) + * get,dataset,imagenet,image-classification,original,_run-during-docker-build + - CM script: [get-dataset-imagenet-val](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-val) + * get,dataset-aux,imagenet-aux,image-classification + - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) + * get,ml-model,resnet50,_onnx,image-classification + * CM names: `--adr.['ml-model']...` + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + * get,generic-python-lib,_package.Pillow + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.opencv-python + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnxruntime + * Skip this dependenecy only if all ENV vars are set:
+`{'USE_CUDA': [True]}` + * CM names: `--adr.['onnxruntime']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnxruntime_gpu + * Enable this dependency only if all ENV vars are set:
+`{'USE_CUDA': [True]}`
+     * CM names: `--adr.['onnxruntime']...`
+       - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib)
+  1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py/customize.py)***
+  1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py/_cm.yaml)
+  1. ***Run native script if exists***
+     * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py/run.bat)
+     * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py/run.sh)
+  1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py/_cm.yaml)
+  1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py/customize.py)***
+  1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py/_cm.yaml) + +___ +### Script output +`cmr "modular python app image-classification onnx [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_APP_IMAGE_CLASSIFICATION_ONNX_PY*` +#### New environment keys auto-detected from customize diff --git a/docs/Modular-AI-ML-application-pipeline/app-image-classification-tf-onnx-cpp.md b/docs/Modular-AI-ML-application-pipeline/app-image-classification-tf-onnx-cpp.md new file mode 100644 index 000000000..4609e0f99 --- /dev/null +++ b/docs/Modular-AI-ML-application-pipeline/app-image-classification-tf-onnx-cpp.md @@ -0,0 +1,133 @@ +Automatically generated README for this automation recipe: **app-image-classification-tf-onnx-cpp** + +Category: **Modular AI/ML application pipeline** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-image-classification-tf-onnx-cpp,879ed32e47074033) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tf-onnx-cpp)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *app,image-classification,cpp,tensorflow,onnx* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "app image-classification cpp tensorflow onnx" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=app,image-classification,cpp,tensorflow,onnx` + +`cm run script --tags=app,image-classification,cpp,tensorflow,onnx ` + +*or* + +`cmr "app image-classification cpp tensorflow onnx"` + +`cmr "app image-classification cpp tensorflow onnx " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'app,image-classification,cpp,tensorflow,onnx' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="app,image-classification,cpp,tensorflow,onnx"``` + +#### Run this script via Docker (beta) + +`cm docker script "app image-classification cpp tensorflow onnx" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_BATCH_COUNT: `1` +* CM_BATCH_SIZE: `1` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tf-onnx-cpp/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,gcc + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + * get,dataset,image-classification,original + - CM script: [get-dataset-imagenet-val](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-val) + * get,dataset-aux,image-classification + - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) + * get,ml-model,raw,image-classification,resnet50,_onnx,_opset-11 + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + * tensorflow,from-src + - CM script: [install-tensorflow-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-tensorflow-from-src) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tf-onnx-cpp/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tf-onnx-cpp/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tf-onnx-cpp/_cm.json) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tf-onnx-cpp/_cm.json) + +___ +### Script output +`cmr "app image-classification cpp tensorflow onnx " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Modular-AI-ML-application-pipeline/app-image-classification-torch-py.md b/docs/Modular-AI-ML-application-pipeline/app-image-classification-torch-py.md new file mode 100644 index 000000000..2f2413737 --- /dev/null +++ b/docs/Modular-AI-ML-application-pipeline/app-image-classification-torch-py.md @@ -0,0 +1,170 @@ +Automatically generated README for this automation recipe: **app-image-classification-torch-py** + +Category: **Modular AI/ML application pipeline** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-image-classification-torch-py,e3986ae887b84ca8) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-torch-py)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *app,image-classification,python,torch* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "app image-classification python torch" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=app,image-classification,python,torch` + +`cm run script --tags=app,image-classification,python,torch[,variations] ` + +*or* + +`cmr "app image-classification python torch"` + +`cmr "app image-classification python torch [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'app,image-classification,python,torch' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="app,image-classification,python,torch"``` + +#### Run this script via Docker (beta) + +`cm docker script "app image-classification python torch[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_cuda` + - Environment variables: + - *USE_CUDA*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cuda + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_BATCH_COUNT: `1` +* CM_BATCH_SIZE: `1` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-torch-py/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,dataset,imagenet,image-classification,preprocessed + - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) + * get,dataset-aux,imagenet-aux,image-classification + - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) + * get,imagenet-helper + - CM script: [get-dataset-imagenet-helper](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-helper) + * get,ml-model,image-classification,resnet50,_pytorch,_fp32 + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + * get,generic-python-lib,_torch + * Skip this dependenecy only if all ENV vars are set:
+`{'USE_CUDA': ['yes']}` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch_cuda + * Enable this dependency only if all ENV vars are set:
+`{'USE_CUDA': ['yes']}`
+       - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib)
+     * get,generic-python-lib,_torchvision
+       * Skip this dependency only if all ENV vars are set:
+`{'USE_CUDA': ['yes']}` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchvision_cuda + * Enable this dependency only if all ENV vars are set:
+`{'USE_CUDA': ['yes']}` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-torch-py/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-torch-py/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-torch-py/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-torch-py/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-torch-py/_cm.json) + +___ +### Script output +`cmr "app image-classification python torch [,variations]" -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Modular-AI-ML-application-pipeline/app-image-classification-tvm-onnx-py.md b/docs/Modular-AI-ML-application-pipeline/app-image-classification-tvm-onnx-py.md new file mode 100644 index 000000000..c94a3505f --- /dev/null +++ b/docs/Modular-AI-ML-application-pipeline/app-image-classification-tvm-onnx-py.md @@ -0,0 +1,158 @@ +Automatically generated README for this automation recipe: **app-image-classification-tvm-onnx-py** + +Category: **Modular AI/ML application pipeline** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-image-classification-tvm-onnx-py,63080407db4d4ac4) ] [ [Notes 
from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tvm-onnx-py)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *app,image-classification,python,tvm-onnx* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "app image-classification python tvm-onnx" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=app,image-classification,python,tvm-onnx` + +`cm run script --tags=app,image-classification,python,tvm-onnx[,variations] ` + +*or* + +`cmr "app image-classification python tvm-onnx"` + +`cmr "app image-classification python tvm-onnx [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'app,image-classification,python,tvm-onnx' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="app,image-classification,python,tvm-onnx"``` + +#### Run this script via Docker (beta) + +`cm docker script "app image-classification python tvm-onnx[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_cuda` + - Environment variables: + - *USE_CUDA*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cuda + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_llvm` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_BATCH_COUNT: `1` +* CM_BATCH_SIZE: `1` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tvm-onnx-py/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,dataset,image-classification,original + - CM script: [get-dataset-imagenet-val](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-val) + * get,dataset-aux,image-classification + - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) + * get,raw,ml-model,image-classification,resnet50,_onnx + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + * get,generic-python-lib,_onnxruntime + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,tvm + * CM names: `--adr.['tvm']...` + - CM script: [get-tvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tvm-onnx-py/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tvm-onnx-py/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tvm-onnx-py/_cm.json) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tvm-onnx-py/_cm.json) + +___ +### Script output +`cmr "app image-classification python tvm-onnx [,variations]" -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Modular-AI-ML-application-pipeline/app-stable-diffusion-onnx-py.md b/docs/Modular-AI-ML-application-pipeline/app-stable-diffusion-onnx-py.md new file mode 100644 index 000000000..14858f184 --- /dev/null +++ b/docs/Modular-AI-ML-application-pipeline/app-stable-diffusion-onnx-py.md @@ -0,0 +1,203 @@ +Automatically generated README for this automation recipe: **app-stable-diffusion-onnx-py** + +Category: **Modular AI/ML application pipeline** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-stable-diffusion-onnx-py,4d33981ac3534b3b) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-stable-diffusion-onnx-py)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *modular,python,app,stable-diffusion,onnx* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "modular python app stable-diffusion onnx" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=modular,python,app,stable-diffusion,onnx` + +`cm run script --tags=modular,python,app,stable-diffusion,onnx[,variations] [--input_flags]` + +*or* + +`cmr "modular python app stable-diffusion onnx"` + +`cmr "modular python app stable-diffusion onnx [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + + +#### Input Flags + +* --**text**=Text to generate image +* --**output**=Output directory + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "text":...} +``` +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'modular,python,app,stable-diffusion,onnx' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="modular,python,app,stable-diffusion,onnx"``` + +#### Run this script via Docker (beta) + +`cm docker script "modular python app stable-diffusion onnx[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**target**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *USE_CPU*: `True` + - *CM_DEVICE*: `cpu` + - Workflow: + * `_cuda` + - Environment variables: + - *USE_CUDA*: `True` + - *CM_DEVICE*: `cuda:0` + - Workflow: + +
+ + +#### Default variations + +`_cpu` + +#### Script flags mapped to environment +
+Click here to expand this section.
+
+* `--output=value` → `CM_APP_STABLE_DIFFUSION_ONNX_PY_OUTPUT=value`
+* `--text=value` → `CM_APP_STABLE_DIFFUSION_ONNX_PY_TEXT=value`
+
+**Above CLI flags can be used in the Python CM API as follows:**
+
+```python
+r=cm.access({... , "output":...})
+```
+
+</details>
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-stable-diffusion-onnx-py/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,cuda + * Enable this dependency only if all ENV vars are set:
+`{'USE_CUDA': [True]}` + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,cudnn + * Enable this dependency only if all ENV vars are set:
+`{'USE_CUDA': [True]}`
+ * CM names: `--adr.['cudnn']...`
+ - CM script: [get-cudnn](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cudnn)
+ * get,generic-python-lib,_package.optimum[onnxruntime]
+ * Skip this dependency only if all ENV vars are set:
+`{'USE_CUDA': [True]}` + * CM names: `--adr.['optimum']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.optimum[onnxruntime-gpu] + * Enable this dependency only if all ENV vars are set:
+`{'USE_CUDA': [True]}` + * CM names: `--adr.['optimum']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.diffusers + * CM names: `--adr.['diffusers']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,ml-model,huggingface,zoo,_model-stub.runwayml/stable-diffusion-v1-5 + - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-stable-diffusion-onnx-py/_cm.yaml) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-stable-diffusion-onnx-py/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-stable-diffusion-onnx-py/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-stable-diffusion-onnx-py/_cm.yaml) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-stable-diffusion-onnx-py/_cm.yaml) + +___ +### Script output +`cmr "modular python app stable-diffusion onnx [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-dummy.md b/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-dummy.md new file mode 100644 index 000000000..64b91c4e3 --- /dev/null +++ b/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-dummy.md @@ -0,0 +1,360 @@ +Automatically generated README for this automation recipe: **app-mlperf-inference-dummy** + +Category: **Modular MLPerf benchmarks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-inference-dummy,5b71627383a94576) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-dummy)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *reproduce,mlcommons,mlperf,inference,harness,dummy-harness,dummy* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "reproduce mlcommons mlperf inference harness dummy-harness dummy" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=reproduce,mlcommons,mlperf,inference,harness,dummy-harness,dummy` + +`cm run script --tags=reproduce,mlcommons,mlperf,inference,harness,dummy-harness,dummy[,variations] [--input_flags]` + +*or* + +`cmr "reproduce mlcommons mlperf inference harness dummy-harness dummy"` + +`cmr "reproduce mlcommons mlperf inference harness dummy-harness dummy [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+ 'automation':'script',
+ 'tags':'reproduce,mlcommons,mlperf,inference,harness,dummy-harness,dummy',
+ 'out':'con',
+ ...
+ (other input keys for this script)
+ ...
+ })
+
+if r['return']>0:
+ print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="reproduce,mlcommons,mlperf,inference,harness,dummy-harness,dummy"``` + +#### Run this script via Docker (beta) + +`cm docker script "reproduce mlcommons mlperf inference harness dummy-harness dummy[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *Internal group (variations should not be selected manually)* +
+ Click here to expand this section. + + * `_bert_` + - Workflow: + * `_gptj_` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,ml-model,gptj + * CM names: `--adr.['gptj-model']...` + - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) + * get,dataset,cnndm,_validation + - CM script: [get-dataset-cnndm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-cnndm) + * `_llama2-70b_` + - Workflow: + +
+ + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_pytorch,cpu` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_torch + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_pytorch,cuda` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_torch_cuda + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_singlestream,resnet50` + - Workflow: + * `_singlestream,retinanet` + - Workflow: + +
+ + + * Group "**backend**" +
+ Click here to expand this section. + + * **`_pytorch`** (default) + - Environment variables: + - *CM_MLPERF_BACKEND*: `pytorch` + - Workflow: + +
+ + + * Group "**batch-size**" +
+ Click here to expand this section. + + * `_bs.#` + - Workflow: + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *CM_MLPERF_DEVICE*: `cpu` + - Workflow: + * `_cuda` + - Environment variables: + - *CM_MLPERF_DEVICE*: `gpu` + - *CM_MLPERF_DEVICE_LIB_NAMESPEC*: `cudart` + - Workflow: + +
+ + + * Group "**loadgen-scenario**" +
+ Click here to expand this section. + + * `_multistream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `MultiStream` + - Workflow: + * `_offline` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Offline` + - Workflow: + * `_server` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Server` + - Workflow: + * `_singlestream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `SingleStream` + - Workflow: + +
+ + + * Group "**model**" +
+ Click here to expand this section. + + * `_bert-99` + - Environment variables: + - *CM_MODEL*: `bert-99` + - *CM_SQUAD_ACCURACY_DTYPE*: `float32` + - Workflow: + * `_bert-99.9` + - Environment variables: + - *CM_MODEL*: `bert-99.9` + - Workflow: + * `_gptj-99` + - Environment variables: + - *CM_MODEL*: `gptj-99` + - *CM_SQUAD_ACCURACY_DTYPE*: `float32` + - Workflow: + * `_gptj-99.9` + - Environment variables: + - *CM_MODEL*: `gptj-99.9` + - Workflow: + * `_llama2-70b-99` + - Environment variables: + - *CM_MODEL*: `llama2-70b-99` + - Workflow: + * `_llama2-70b-99.9` + - Environment variables: + - *CM_MODEL*: `llama2-70b-99.9` + - Workflow: + * **`_resnet50`** (default) + - Environment variables: + - *CM_MODEL*: `resnet50` + - Workflow: + * `_retinanet` + - Environment variables: + - *CM_MODEL*: `retinanet` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * `_fp16` + - Workflow: + * `_fp32` + - Workflow: + * `_uint8` + - Workflow: + +
+ + +#### Default variations + +`_cpu,_pytorch,_resnet50` + +#### Script flags mapped to environment +
+Click here to expand this section.
+
+* `--count=value` → `CM_MLPERF_LOADGEN_QUERY_COUNT=value`
+* `--max_batchsize=value` → `CM_MLPERF_LOADGEN_MAX_BATCHSIZE=value`
+* `--mlperf_conf=value` → `CM_MLPERF_CONF=value`
+* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value`
+* `--multistream_target_latency=value` → `CM_MLPERF_LOADGEN_MULTISTREAM_TARGET_LATENCY=value`
+* `--offline_target_qps=value` → `CM_MLPERF_LOADGEN_OFFLINE_TARGET_QPS=value`
+* `--output_dir=value` → `CM_MLPERF_OUTPUT_DIR=value`
+* `--performance_sample_count=value` → `CM_MLPERF_LOADGEN_PERFORMANCE_SAMPLE_COUNT=value`
+* `--rerun=value` → `CM_RERUN=value`
+* `--results_repo=value` → `CM_MLPERF_INFERENCE_RESULTS_REPO=value`
+* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value`
+* `--server_target_qps=value` → `CM_MLPERF_LOADGEN_SERVER_TARGET_QPS=value`
+* `--singlestream_target_latency=value` → `CM_MLPERF_LOADGEN_SINGLESTREAM_TARGET_LATENCY=value`
+* `--skip_preprocess=value` → `CM_SKIP_PREPROCESS_DATASET=value`
+* `--skip_preprocessing=value` → `CM_SKIP_PREPROCESS_DATASET=value`
+* `--target_latency=value` → `CM_MLPERF_LOADGEN_TARGET_LATENCY=value`
+* `--target_qps=value` → `CM_MLPERF_LOADGEN_TARGET_QPS=value`
+* `--user_conf=value` → `CM_MLPERF_USER_CONF=value`
+
+**Above CLI flags can be used in the Python CM API as follows:**
+
+```python
+r=cm.access({... , "count":...})
+```
+
+</details>
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_LOADGEN_SCENARIO: `Offline` +* CM_MLPERF_LOADGEN_MODE: `performance` +* CM_SKIP_PREPROCESS_DATASET: `no` +* CM_SKIP_MODEL_DOWNLOAD: `no` +* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `dummy_harness` +* CM_MLPERF_SKIP_RUN: `no` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-dummy/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,mlcommons,inference,loadgen + * CM names: `--adr.['inference-loadgen']...` + - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) + * generate,user-conf,mlperf,inference + * CM names: `--adr.['user-conf-generator']...` + - CM script: [generate-mlperf-inference-user-conf](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-user-conf) + * get,generic-python-lib,_mlperf_logging + * CM names: `--adr.['mlperf-logging']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,git,repo + * CM names: `--adr.inference-results inference-code...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-dummy/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-dummy/_cm.yaml) + 1. 
***Run native script if exists***
+ * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-dummy/run.sh)
+ 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-dummy/_cm.yaml)
+ 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-dummy/customize.py)***
+ 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-dummy/_cm.yaml)***
+ * benchmark-mlperf
+ * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': ['yes', True]}` + * CM names: `--adr.['runner', 'mlperf-runner']...` + - CM script: [benchmark-program-mlperf](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program-mlperf) + * save,mlperf,inference,state + * CM names: `--adr.['save-mlperf-inference-state']...` + - CM script: [save-mlperf-inference-implementation-state](https://github.com/mlcommons/cm4mlops/tree/master/script/save-mlperf-inference-implementation-state) + +___ +### Script output +`cmr "reproduce mlcommons mlperf inference harness dummy-harness dummy [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +* `CM_HW_NAME` +* `CM_IMAGENET_ACCURACY_DTYPE` +* `CM_MAX_EXAMPLES` +* `CM_MLPERF_*` +* `CM_ML_MODEL_*` +* `CM_SQUAD_ACCURACY_DTYPE` +#### New environment keys auto-detected from customize diff --git a/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-intel.md b/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-intel.md new file mode 100644 index 000000000..3278fbf44 --- /dev/null +++ b/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-intel.md @@ -0,0 +1,621 @@ +Automatically generated README for this automation recipe: **app-mlperf-inference-intel** + +Category: **Modular MLPerf benchmarks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-inference-intel,c05a90433bb04cc1) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): 
*reproduce,mlcommons,mlperf,inference,harness,intel-harness,intel,intel-harness,intel* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "reproduce mlcommons mlperf inference harness intel-harness intel intel-harness intel" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=reproduce,mlcommons,mlperf,inference,harness,intel-harness,intel,intel-harness,intel` + +`cm run script --tags=reproduce,mlcommons,mlperf,inference,harness,intel-harness,intel,intel-harness,intel[,variations] [--input_flags]` + +*or* + +`cmr "reproduce mlcommons mlperf inference harness intel-harness intel intel-harness intel"` + +`cmr "reproduce mlcommons mlperf inference harness intel-harness intel intel-harness intel [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+ 'automation':'script',
+ 'tags':'reproduce,mlcommons,mlperf,inference,harness,intel-harness,intel,intel-harness,intel',
+ 'out':'con',
+ ...
+ (other input keys for this script)
+ ...
+ })
+
+if r['return']>0:
+ print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="reproduce,mlcommons,mlperf,inference,harness,intel-harness,intel,intel-harness,intel"``` + +#### Run this script via Docker (beta) + +`cm docker script "reproduce mlcommons mlperf inference harness intel-harness intel intel-harness intel[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *Internal group (variations should not be selected manually)* +
+ Click here to expand this section. + + * `_bert_` + - Environment variables: + - *CM_BENCHMARK*: `STANDALONE_BERT` + - *dataset_squad_tokenized_max_seq_length*: `384` + - *loadgen_buffer_size*: `10833` + - *loadgen_dataset_size*: `10833` + - Workflow: + * `_build-harness,bert_` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-sys-util,_rsync + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,dataset,original,squad + * CM names: `--adr.['squad-original']...` + - CM script: [get-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad) + * get,ml-model,bert-large,_pytorch,_int8 + * CM names: `--adr.['bert-large', 'ml-model']...` + - CM script: [get-ml-model-bert-large-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-large-squad) + * get,generic-python-lib,_package.tokenization + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_calibration,gptj_` + - Workflow: + * `_gptj_` + - Environment variables: + - *CM_BENCHMARK*: `STANDALONE_GPTJ` + - Workflow: + * `_int4,gptj_` + - Environment variables: + - *INTEL_GPTJ_INT4*: `yes` + - Workflow: + * `_int8,gptj_` + - Environment variables: + - *INTEL_GPTJ_INT4*: `no` + - Workflow: + +
+ + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_bert_,network-client` + - Environment variables: + - *CM_BENCHMARK*: `NETWORK_BERT_CLIENT` + - Workflow: + * `_bert_,network-server` + - Environment variables: + - *CM_BENCHMARK*: `NETWORK_BERT_SERVER` + - Workflow: + * `_bert_,pytorch` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,conda,_name.bert-pt + - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) + * install,llvm,src,_tag.llvmorg-15.0.7,_runtimes.libcxx:libcxxabi:openmp,_clang,_release,_for-intel-mlperf-inference-v3.1-bert + - CM script: [install-llvm-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-src) + * get,generic-sys-util,_libffi7 + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,conda-package,_package.python + * CM names: `--adr.['conda-package', 'python']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic,conda-package,_package.ncurses,_source.conda-forge + * CM names: `--adr.['conda-package', 'ncurses']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic-sys-util,_numactl + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,conda-package,_package.jemalloc,_source.conda-forge + * CM names: `--adr.['conda-package', 'jemalloc']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,pytorch,from.src,_for-intel-mlperf-inference-v3.1-bert + - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src) + * 
install,onednn,from.src,_for-intel-mlperf-inference-v3.1-bert + - CM script: [install-onednn-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-onednn-from-src) + * install,transformers,from.src,_for-intel-mlperf-inference-v3.1-bert + - CM script: [install-transformers-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-transformers-from-src) + * `_bs.#` + - Environment variables: + - *ML_MLPERF_MODEL_BATCH_SIZE*: `#` + - Workflow: + * `_gptj_,build-harness` + - Workflow: + * `_gptj_,pytorch` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,conda,_name.gptj-pt + - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) + * get,python,_conda.gptj-pt + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * install,llvm,src,_tag.llvmorg-16.0.6,_clang,_release,_for-intel-mlperf-inference-v3.1-gptj + - CM script: [install-llvm-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-src) + * get,generic,conda-package,_package.ncurses,_source.conda-forge + * CM names: `--adr.['conda-package', 'ncurses']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * get,generic-sys-util,_numactl + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,conda-package,_package.jemalloc,_source.conda-forge + * CM names: `--adr.['conda-package', 'jemalloc']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * install,ipex,from.src,_for-intel-mlperf-inference-v3.1-gptj + - CM script: [install-ipex-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-ipex-from-src) + * get,generic,conda-package,_package.ninja + * Enable this dependency only if all ENV vars 
are set:
+`{'INTEL_GPTJ_INT4': ['yes']}` + * CM names: `--adr.['conda-package', 'ninja']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * install,tpp-pex,from.src,_for-intel-mlperf-inference-v3.1-gptj + * Enable this dependency only if all ENV vars are set:
+`{'INTEL_GPTJ_INT4': ['yes']}` + - CM script: [install-tpp-pytorch-extension](https://github.com/mlcommons/cm4mlops/tree/master/script/install-tpp-pytorch-extension) + * get,generic-python-lib,_package.transformers + * CM names: `--adr.['pip-package', 'transformers']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,mlcommons,inference,loadgen,_custom-python + * CM names: `--adr.['inference-loadgen']...` + - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) + * get,ml-model,large-language-model,gptj + * CM names: `--adr.['ml-model', 'gptj-model', 'gpt-j-model']...` + - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) + * get,generic-python-lib,_package.datasets + * CM names: `--adr.['pip-package', 'datasets']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.accelerate + * CM names: `--adr.['pip-package', 'accelerate']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_custom-python,_package.torch,_url.git+https://github.com/pytorch/pytorch.git@927dc662386af052018212c7d01309a506fc94cd + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_int4,gptj_,build-harness` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * reproduce,mlperf,inference,intel,harness,_calibration + * CM names: `--adr.['calibration']...` + - CM script: [app-mlperf-inference-intel](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-intel) + * get,generic-python-lib,_package.optimum + * CM names: `--adr.['pip-package', 'optimum']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_resnet50,uint8` + - Environment variables: + - *CM_IMAGENET_ACCURACY_DTYPE*: `int8` + - Workflow: + * `_sapphire-rapids.112c,gptj-99,offline,int4` + - Environment variables: + - *NUM_PROC*: `4` + - *KMP_BLOCKTIME*: `1` + - *WORKERS_PER_PROC*: `3` + - Workflow: + * `_sapphire-rapids.112c,gptj-99,offline,int8` + - Environment variables: + - *KMP_BLOCKTIME*: `1` + - *WORKERS_PER_PROC*: `2` + - Workflow: + * `_sapphire-rapids.112c,gptj-99,server,int4` + - Environment variables: + - *KMP_BLOCKTIME*: `1` + - *WORKERS_PER_PROC*: `4` + - Workflow: + * `_sapphire-rapids.112c,gptj-99,server,int8` + - Environment variables: + - *KMP_BLOCKTIME*: `1` + - *WORKERS_PER_PROC*: `2` + - Workflow: + * `_sapphire-rapids.24c,bert-99` + - Environment variables: + - *WORKERS_PER_PROC*: `1` + - Workflow: + * `_sapphire-rapids.24c,gptj-99,offline,int4` + - Environment variables: + - *KMP_BLOCKTIME*: `10` + - *WORKERS_PER_PROC*: `1` + - Workflow: + * `_sapphire-rapids.24c,gptj-99,offline,int8` + - Environment variables: + - *KMP_BLOCKTIME*: `10` + - *WORKERS_PER_PROC*: `1` + - Workflow: + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *CM_MLPERF_DEVICE*: `cpu` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * **`_pytorch`** (default) + - Environment variables: + - *CM_MLPERF_BACKEND*: `pytorch` + - *CM_MLPERF_BACKEND_LIB_NAMESPEC*: `pytorch` + - Workflow: + +
+ + + * Group "**loadgen-batchsize**" +
+ Click here to expand this section. + + * `_batch_size.#` + - Environment variables: + - *CM_MLPERF_LOADGEN_BATCH_SIZE*: `#` + - Workflow: + +
+ + + * Group "**loadgen-scenario**" +
+ Click here to expand this section. + + * `_multistream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `MultiStream` + - Workflow: + * `_offline` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Offline` + - Workflow: + * `_server` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Server` + - Workflow: + * `_singlestream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `SingleStream` + - Workflow: + +
+ + + * Group "**model**" +
+ Click here to expand this section. + + * `_bert-99` + - Environment variables: + - *CM_MODEL*: `bert-99` + - *CM_SQUAD_ACCURACY_DTYPE*: `float32` + - *CM_NOT_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3750364/files/bert_large_v1_1_fake_quant.onnx` + - Workflow: + * `_bert-99.9` + - Environment variables: + - *CM_MODEL*: `bert-99.9` + - *CM_NOT_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3733910/files/model.onnx` + - Workflow: + * `_gptj-99` + - Environment variables: + - *CM_MODEL*: `gptj-99` + - *CM_NOT_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3733910/files/model.onnx` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8` + - Workflow: + * `_gptj-99.9` + - Environment variables: + - *CM_MODEL*: `gptj-99.9` + - *CM_NOT_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3733910/files/model.onnx` + - Workflow: + * **`_resnet50`** (default) + - Environment variables: + - *CM_MODEL*: `resnet50` + - *dataset_imagenet_preprocessed_input_square_side*: `224` + - *ml_model_has_background_class*: `YES` + - *ml_model_image_height*: `224` + - *loadgen_buffer_size*: `1024` + - *loadgen_dataset_size*: `50000` + - *CM_BENCHMARK*: `STANDALONE_CLASSIFICATION` + - Workflow: + * `_retinanet` + - Environment variables: + - *CM_MODEL*: `retinanet` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/6617981/files/resnext50_32x4d_fpn.pth` + - *dataset_imagenet_preprocessed_input_square_side*: `224` + - *ml_model_image_height*: `800` + - *ml_model_image_width*: `800` + - *loadgen_buffer_size*: `64` + - *loadgen_dataset_size*: `24576` + - *CM_BENCHMARK*: `STANDALONE_OBJECT_DETECTION` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + + * Group "**network-mode**" +
+ Click here to expand this section. + + * `_network-server` + - Environment variables: + - *CM_MLPERF_NETWORK_RUN_MODE*: `network-server` + - Workflow: + * **`_standalone`** (default) + - Environment variables: + - *CM_MLPERF_NETWORK_RUN_MODE*: `standalone` + - Workflow: + +
+ + + * Group "**network-run-mode**" +
+ Click here to expand this section. + + * `_network-client` + - Environment variables: + - *CM_MLPERF_NETWORK_RUN_MODE*: `network-client` + - Workflow: + +
+ + + * Group "**power-mode**" +
+ Click here to expand this section. + + * `_maxn` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_MAXN*: `True` + - Workflow: + * `_maxq` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_MAXQ*: `True` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * `_fp32` + - Environment variables: + - *CM_IMAGENET_ACCURACY_DTYPE*: `float32` + - Workflow: + * `_int4` + - Workflow: + * `_uint8` + - Workflow: + +
+ + + * Group "**run-mode**" +
+ Click here to expand this section. + + * `_build-harness` + - Environment variables: + - *CM_LOCAL_MLPERF_INFERENCE_INTEL_RUN_MODE*: `build_harness` + - Workflow: + * `_calibration` + - Environment variables: + - *CM_LOCAL_MLPERF_INFERENCE_INTEL_RUN_MODE*: `calibration` + - Workflow: + * **`_run-harness`** (default) + - Environment variables: + - *CM_LOCAL_MLPERF_INFERENCE_INTEL_RUN_MODE*: `run_harness` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * reproduce,mlperf,inference,intel,harness,_build-harness + * CM names: `--adr.['build-harness']...` + - CM script: [app-mlperf-inference-intel](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-intel) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * generate,user-conf,mlperf,inference + * CM names: `--adr.['user-conf-generator']...` + - CM script: [generate-mlperf-inference-user-conf](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-user-conf) + +
+ + + * Group "**sut**" +
+ Click here to expand this section. + + * `_sapphire-rapids.112c` + - Environment variables: + - *WARMUP*: ` --warmup` + - Workflow: + * `_sapphire-rapids.24c` + - Workflow: + +
+ + +#### Default variations + +`_cpu,_pytorch,_resnet50,_run-harness,_standalone` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--count=value` → `CM_MLPERF_LOADGEN_QUERY_COUNT=value` +* `--max_batchsize=value` → `CM_MLPERF_LOADGEN_MAX_BATCHSIZE=value` +* `--mlperf_conf=value` → `CM_MLPERF_CONF=value` +* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` +* `--multistream_target_latency=value` → `CM_MLPERF_LOADGEN_MULTISTREAM_TARGET_LATENCY=value` +* `--offline_target_qps=value` → `CM_MLPERF_LOADGEN_OFFLINE_TARGET_QPS=value` +* `--output_dir=value` → `CM_MLPERF_OUTPUT_DIR=value` +* `--performance_sample_count=value` → `CM_MLPERF_LOADGEN_PERFORMANCE_SAMPLE_COUNT=value` +* `--rerun=value` → `CM_RERUN=value` +* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` +* `--server_target_qps=value` → `CM_MLPERF_LOADGEN_SERVER_TARGET_QPS=value` +* `--singlestream_target_latency=value` → `CM_MLPERF_LOADGEN_SINGLESTREAM_TARGET_LATENCY=value` +* `--skip_preprocess=value` → `CM_SKIP_PREPROCESS_DATASET=value` +* `--skip_preprocessing=value` → `CM_SKIP_PREPROCESS_DATASET=value` +* `--target_latency=value` → `CM_MLPERF_LOADGEN_TARGET_LATENCY=value` +* `--target_qps=value` → `CM_MLPERF_LOADGEN_TARGET_QPS=value` +* `--user_conf=value` → `CM_MLPERF_USER_CONF=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "count":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_BATCH_COUNT: `1` +* CM_BATCH_SIZE: `1` +* CM_FAST_COMPILATION: `yes` +* CM_MLPERF_LOADGEN_SCENARIO: `Offline` +* CM_MLPERF_LOADGEN_MODE: `performance` +* CM_SKIP_PREPROCESS_DATASET: `no` +* CM_SKIP_MODEL_DOWNLOAD: `no` +* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `intel` +* CM_MLPERF_SKIP_RUN: `no` +* verbosity: `1` +* loadgen_trigger_cold_run: `0` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,generic-python-lib,_mlperf_logging + * CM names: `--adr.['mlperf-logging']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,ml-model,resnet50,_fp32,_onnx,_from-tf + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + * CM names: `--adr.['resnet50-model', 'ml-model']...` + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + * compile,intel,model,_resnet50 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + * CM names: `--adr.['resnet50-compiler']...` + - *Warning: no scripts found* + * get,dataset,imagenet,preprocessed,_for.resnet50,_NHWC,_full + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + * CM names: `--adr.['imagenet-preprocessed', 'dataset-preprocessed']...` + - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) + * compile,intel,model,_retinanet + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + * CM names: `--adr.['retinanet-compiler']...` + - *Warning: no scripts found* + * get,dataset,preprocessed,openimages,_for.retinanet.onnx,_NCHW,_validation,_custom-annotations + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + * CM names: `--adr.['openimages-preprocessed', 'dataset-preprocessed']...` + - CM script: [get-preprocessed-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openimages) + * get,mlperf,inference,results,_ctuning + * CM names: `--adr.inference-results...` + - CM script: [get-mlperf-inference-results](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel/_cm.yaml) + 1. ***Run native script if exists*** + * [run_bert_harness.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel/run_bert_harness.sh) + * [run_gptj_harness.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel/run_gptj_harness.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel/_cm.yaml) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel/_cm.yaml)*** + * benchmark-mlperf + * Enable this dependency only if all ENV vars are set:<br>
+`{'CM_LOCAL_MLPERF_INFERENCE_INTEL_RUN_MODE': ['run_harness']}` + * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_MLPERF_SKIP_RUN': ['yes', True]}` + * CM names: `--adr.['runner', 'mlperf-runner']...` + - CM script: [benchmark-program-mlperf](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program-mlperf) + * save,mlperf,inference,state + * CM names: `--adr.['save-mlperf-inference-state']...` + - CM script: [save-mlperf-inference-implementation-state](https://github.com/mlcommons/cm4mlops/tree/master/script/save-mlperf-inference-implementation-state) + +___ +### Script output +`cmr "reproduce mlcommons mlperf inference harness intel-harness intel intel-harness intel [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-qualcomm.md b/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-qualcomm.md new file mode 100644 index 000000000..6205c2108 --- /dev/null +++ b/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-qualcomm.md @@ -0,0 +1,775 @@ +Automatically generated README for this automation recipe: **app-mlperf-inference-qualcomm** + +Category: **Modular MLPerf benchmarks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-inference-qualcomm,eef1aca5d7c0470e) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-qualcomm)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *reproduce,mlcommons,mlperf,inference,harness,qualcomm-harness,qualcomm,kilt-harness,kilt* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "reproduce mlcommons mlperf inference harness qualcomm-harness qualcomm kilt-harness kilt" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=reproduce,mlcommons,mlperf,inference,harness,qualcomm-harness,qualcomm,kilt-harness,kilt` + +`cm run script --tags=reproduce,mlcommons,mlperf,inference,harness,qualcomm-harness,qualcomm,kilt-harness,kilt[,variations] [--input_flags]` + +*or* + +`cmr "reproduce mlcommons mlperf inference harness qualcomm-harness qualcomm kilt-harness kilt"` + +`cmr "reproduce mlcommons mlperf inference harness qualcomm-harness qualcomm kilt-harness kilt [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'reproduce,mlcommons,mlperf,inference,harness,qualcomm-harness,qualcomm,kilt-harness,kilt', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="reproduce,mlcommons,mlperf,inference,harness,qualcomm-harness,qualcomm,kilt-harness,kilt"``` + +#### Run this script via Docker (beta) + +`cm docker script "reproduce mlcommons mlperf inference harness qualcomm-harness qualcomm kilt-harness kilt[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *Internal group (variations should not be selected manually)* +
+ Click here to expand this section. + + * `_bert_` + - Environment variables: + - *CM_BENCHMARK*: `STANDALONE_BERT` + - *kilt_model_name*: `bert` + - *kilt_model_seq_length*: `384` + - *kilt_model_bert_variant*: `BERT_PACKED` + - *kilt_input_format*: `INT64,1,384:INT64,1,8:INT64,1,384:INT64,1,384` + - *kilt_output_format*: `FLOAT32,1,384:FLOAT32,1,384` + - *dataset_squad_tokenized_max_seq_length*: `384` + - *loadgen_buffer_size*: `10833` + - *loadgen_dataset_size*: `10833` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_transformers + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_safetensors + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnx + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_activation-count.#` + - Environment variables: + - *CM_MLPERF_QAIC_ACTIVATION_COUNT*: `#` + - Workflow: + * `_bert-99,offline` + - Workflow: + * `_bert-99,qaic` + - Environment variables: + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `https://github.com/mlcommons/inference_results_v3.1/blob/main/closed/Qualcomm/calibration.md` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int32` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8,fp16` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * compile,qaic,model,_bert-99,_pc.99.9980 + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': [True]}` + * CM names: `--adr.['qaic-model-compiler', 'bert-99-compiler']...` + - CM script: [compile-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-model-for.qaic) + * `_bert-99.9,offline` + - Workflow: + * `_bert-99.9,qaic` + - Environment variables: + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `https://github.com/mlcommons/inference_results_v3.1/blob/main/closed/Qualcomm/calibration.md` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int32` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp16` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * compile,qaic,model,_bert-99.9 + * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_MLPERF_SKIP_RUN': [True]}` + * CM names: `--adr.['qaic-model-compiler', 'bert-99.9-compiler']...` + - CM script: [compile-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-model-for.qaic) + * `_bert_,network-client` + - Environment variables: + - *CM_BENCHMARK*: `NETWORK_BERT_CLIENT` + - Workflow: + * `_bert_,network-server` + - Environment variables: + - *CM_BENCHMARK*: `NETWORK_BERT_SERVER` + - Workflow: + * `_bert_,qaic` + - Environment variables: + - *kilt_model_batch_size*: `1` + - *kilt_input_format*: `UINT32,1,384:UINT32,1,8:UINT32,1,384:UINT32,1,384` + - *kilt_input_formata*: `UINT32,1,384:UINT32,1,384:UINT32,1,384` + - *kilt_output_formatia*: `UINT8,1,384:UINT8,1,384` + - *kilt_device_qaic_skip_stage*: `convert` + - Workflow: + * `_bert_,singlestream` + - Environment variables: + - *kilt_model_batch_size*: `1` + - Workflow: + * `_dl2q.24xlarge,bert-99,offline` + - Environment variables: + - *qaic_activation_count*: `14` + - Workflow: + * `_dl2q.24xlarge,bert-99.9,offline` + - Environment variables: + - *qaic_activation_count*: `7` + - Workflow: + * `_dl2q.24xlarge,bert-99.9,server` + - Environment variables: + - *qaic_activation_count*: `7` + - Workflow: + * `_dl2q.24xlarge,resnet50,multistream` + - Environment variables: + - *qaic_activation_count*: `1` + - Workflow: + * `_dl2q.24xlarge,resnet50,offline` + - Environment variables: + - *qaic_activation_count*: `3` + - Workflow: + * `_dl2q.24xlarge,resnet50,server` + - Environment variables: + - *qaic_activation_count*: `3` + - Workflow: + * `_dl2q.24xlarge,retinanet,offline` + - Environment variables: + - *qaic_activation_count*: `14` + - Workflow: + * `_dl2q.24xlarge,retinanet,server` + - Environment variables: + - *qaic_activation_count*: `14` + - Workflow: + * `_dl2q.24xlarge,singlestream` + - Environment variables: + - *CM_QAIC_DEVICES*: `0` + - *qaic_activation_count*: `1` + - Workflow: + * `_num-devices.4` + - Environment variables: + - *CM_QAIC_DEVICES*: 
`0,1,2,3` + - Workflow: + * `_pro` + - Environment variables: + - *qaic_queue_length*: `10` + - Workflow: + * `_pro,num-devices.4,bert-99,offline` + - Environment variables: + - *qaic_activation_count*: `16` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * set,device,qaic,_vc.15 + - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) + * `_pro,num-devices.4,bert-99,server` + - Environment variables: + - *qaic_activation_count*: `16` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * set,device,qaic,_vc.13 + - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) + * `_pro,num-devices.4,bert-99.9,offline` + - Environment variables: + - *qaic_activation_count*: `8` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * set,device,qaic,_vc.13 + - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) + * `_pro,num-devices.4,bert-99.9,server` + - Environment variables: + - *qaic_activation_count*: `8` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * set,device,qaic,_vc.13 + - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) + * `_pro,num-devices.4,resnet50,offline` + - Environment variables: + - *qaic_activation_count*: `4` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * set,device,qaic,_vc.16 + - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) + * `_pro,num-devices.4,resnet50,server` + - Environment variables: + - *qaic_activation_count*: `4` + - Workflow: + * `_pro,num-devices.4,retinanet,offline` + - Environment variables: + - *qaic_activation_count*: `16` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * set,device,qaic,_vc.17 + - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) + * `_pro,num-devices.4,retinanet,server` + - Environment variables: + - *qaic_activation_count*: `16` + - Workflow: + * `_pro,num-devices.4,singlestream` + - Environment variables: + - *CM_QAIC_DEVICES*: `0` + - *qaic_activation_count*: `1` + - Workflow: + * `_rb6,bert-99,offline` + - Environment variables: + - *qaic_activation_count*: `9` + - Workflow: + * `_rb6,resnet50,multistream` + - Environment variables: + - *qaic_activation_count*: `2` + - Workflow: + * `_rb6,resnet50,offline` + - Environment variables: + - *qaic_activation_count*: `2` + - Workflow: + * `_rb6,retinanet,multistream` + - Environment variables: + - *qaic_activation_count*: `8` + - Workflow: + * `_rb6,retinanet,offline` + - Environment variables: + - *qaic_activation_count*: `9` + - Workflow: + * `_rb6,singlestream` + - Environment variables: + - *qaic_activation_count*: `1` + - Workflow: + * `_resnet50,uint8` + - Environment variables: + - *kilt_input_format*: `UINT8,-1,224,224,3` + - *kilt_device_qaic_skip_stage*: `convert` + - *CM_IMAGENET_ACCURACY_DTYPE*: `int8` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `https://github.com/mlcommons/inference_results_v3.1/blob/main/closed/Qualcomm/calibration.md` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8` + - Workflow: + * `_retinanet,qaic,uint8` + - Environment variables: + - *kilt_device_qaic_skip_stage*: `convert` + - *kilt_input_format*: `UINT8,1,3,800,800` + - *kilt_output_format*: `INT8,1,1000:INT8,1,1000:INT8,1,1000:INT8,1,1000:INT8,1,1000:INT8,1,1000:INT8,1,1000:INT8,1,1000:INT8,1,1000:INT8,1,1000:INT8,1,4,1000:INT8,14,1000:INT8,1,4,1000:INT8,1,4,1000:INT8,1,4,1000` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `https://github.com/mlcommons/inference_results_v3.1/blob/main/closed/Qualcomm/calibration.md` + - 
*CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8` + - Workflow: + * `_singlestream,resnet50` + - Workflow: + * `_singlestream,retinanet` + - Workflow: + +
+ + + * Group "**batch-size**" +
+ Click here to expand this section. + + * `_bs.#` + - Environment variables: + - *kilt_model_batch_size*: `#` + - Workflow: + * `_bs.0` + - Environment variables: + - *kilt_model_batch_size*: `1` + - Workflow: + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *CM_MLPERF_DEVICE*: `cpu` + - *kilt_backend_type*: `cpu` + - Workflow: + * `_cuda` + - Environment variables: + - *CM_MLPERF_DEVICE*: `gpu` + - *CM_MLPERF_DEVICE_LIB_NAMESPEC*: `cudart` + - *kilt_backend_type*: `gpu` + - Workflow: + * `_qaic` + - Environment variables: + - *CM_MLPERF_DEVICE*: `qaic` + - *CM_MLPERF_DEVICE_LIB_NAMESPEC*: `QAic` + - *kilt_backend_type*: `qaic` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,qaic,platform,sdk + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': [True]}` + - CM script: [get-qaic-platform-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-qaic-platform-sdk) + * get,lib,protobuf,_tag.v3.11.4 + * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_MLPERF_SKIP_RUN': [True]}` + - CM script: [get-lib-protobuf](https://github.com/mlcommons/cm4mlops/tree/master/script/get-lib-protobuf) + * set,device,mode,qaic + * Enable this dependency only if all ENV vars are set:
+`{'CM_QAIC_VC': 'on'}` + - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) + * set,device,mode,qaic,_ecc + * Enable this dependency only if all ENV vars are set:
+`{'CM_QAIC_ECC': 'yes'}` + - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * `_glow` + - Environment variables: + - *device*: `qaic` + - *CM_MLPERF_BACKEND*: `glow` + - *CM_MLPERF_BACKEND_LIB_NAMESPEC*: `QAic` + - Workflow: + * **`_onnxruntime`** (default) + - Environment variables: + - *device*: `onnxrt` + - *CM_MLPERF_BACKEND*: `onnxruntime` + - *CM_MLPERF_BACKEND_LIB_NAMESPEC*: `onnxruntime` + - Workflow: + * `_tensorrt` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tensorrt` + - *device*: `tensorrt` + - *CM_MLPERF_BACKEND_NAME*: `TensorRT` + - Workflow: + +
+ + + * Group "**loadgen-batch-size**" +
+ Click here to expand this section. + + * `_loadgen-batch-size.#` + - Environment variables: + - *CM_MLPERF_LOADGEN_BATCH_SIZE*: `#` + - Workflow: + +
+ + + * Group "**loadgen-scenario**" +
+ Click here to expand this section. + + * `_multistream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `MultiStream` + - Workflow: + * `_offline` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Offline` + - Workflow: + * `_server` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Server` + - Workflow: + * `_singlestream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `SingleStream` + - Workflow: + +
+ + + * Group "**model**" +
+ Click here to expand this section. + + * `_bert-99` + - Environment variables: + - *CM_MODEL*: `bert-99` + - *CM_SQUAD_ACCURACY_DTYPE*: `float32` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3750364/files/bert_large_v1_1_fake_quant.onnx` + - Workflow: + * `_bert-99.9` + - Environment variables: + - *CM_MODEL*: `bert-99.9` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3733910/files/model.onnx` + - Workflow: + * **`_resnet50`** (default) + - Environment variables: + - *CM_MODEL*: `resnet50` + - *kilt_model_name*: `resnet50` + - *kilt_input_count*: `1` + - *kilt_output_count*: `1` + - *kilt_input_format*: `FLOAT32,-1,224,224,3` + - *kilt_output_format*: `INT64,-1` + - *dataset_imagenet_preprocessed_input_square_side*: `224` + - *ml_model_has_background_class*: `YES` + - *ml_model_image_height*: `224` + - *loadgen_buffer_size*: `1024` + - *loadgen_dataset_size*: `50000` + - *CM_BENCHMARK*: `STANDALONE_CLASSIFICATION` + - Workflow: + * `_retinanet` + - Environment variables: + - *CM_MODEL*: `retinanet` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/6617981/files/resnext50_32x4d_fpn.pth` + - *kilt_model_name*: `retinanet` + - *kilt_input_count*: `1` + - *kilt_model_max_detections*: `600` + - *kilt_output_count*: `1` + - *kilt_input_format*: `FLOAT32,-1,3,800,800` + - *kilt_output_format*: `INT64,-1` + - *dataset_imagenet_preprocessed_input_square_side*: `224` + - *ml_model_image_height*: `800` + - *ml_model_image_width*: `800` + - *loadgen_buffer_size*: `64` + - *loadgen_dataset_size*: `24576` + - *CM_BENCHMARK*: `STANDALONE_OBJECT_DETECTION` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,generic-python-lib,_Pillow + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchvision + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_opencv-python + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pycocotools + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + + * Group "**nsp**" +
+ Click here to expand this section. + + * `_nsp.#` + - Workflow: + * `_nsp.14` + - Workflow: + * `_nsp.16` + - Workflow: + +
+ + + * Group "**power-mode**" +
+ Click here to expand this section. + + * `_maxn` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_MAXN*: `True` + - Workflow: + * `_maxq` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_MAXQ*: `True` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * `_fp16` + - Workflow: + * `_fp32` + - Environment variables: + - *CM_IMAGENET_ACCURACY_DTYPE*: `float32` + - Workflow: + * `_uint8` + - Workflow: + +
+ + + * Group "**run-mode**" +
+ Click here to expand this section. + + * `_network-client` + - Environment variables: + - *CM_RUN_MODE*: `network-client` + - Workflow: + * `_network-server` + - Environment variables: + - *CM_RUN_MODE*: `network-server` + - Workflow: + * **`_standalone`** (default) + - Environment variables: + - *CM_RUN_MODE*: `standalone` + - Workflow: + +
+ + + * Group "**sut**" +
+ Click here to expand this section. + + * `_dl2q.24xlarge` + - Environment variables: + - *CM_QAIC_DEVICES*: `0,1,2,3,4,5,6,7` + - *qaic_queue_length*: `4` + - Workflow: + * `_rb6` + - Environment variables: + - *CM_QAIC_DEVICES*: `0` + - *qaic_queue_length*: `6` + - Workflow: + +
+ + +#### Default variations + +`_cpu,_onnxruntime,_resnet50,_standalone` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--count=value` → `CM_MLPERF_LOADGEN_QUERY_COUNT=value` +* `--devices=value` → `CM_QAIC_DEVICES=value` +* `--max_batchsize=value` → `CM_MLPERF_LOADGEN_MAX_BATCHSIZE=value` +* `--mlperf_conf=value` → `CM_MLPERF_CONF=value` +* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` +* `--multistream_target_latency=value` → `CM_MLPERF_LOADGEN_MULTISTREAM_TARGET_LATENCY=value` +* `--offline_target_qps=value` → `CM_MLPERF_LOADGEN_OFFLINE_TARGET_QPS=value` +* `--output_dir=value` → `CM_MLPERF_OUTPUT_DIR=value` +* `--performance_sample_count=value` → `CM_MLPERF_LOADGEN_PERFORMANCE_SAMPLE_COUNT=value` +* `--rerun=value` → `CM_RERUN=value` +* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` +* `--server_target_qps=value` → `CM_MLPERF_LOADGEN_SERVER_TARGET_QPS=value` +* `--singlestream_target_latency=value` → `CM_MLPERF_LOADGEN_SINGLESTREAM_TARGET_LATENCY=value` +* `--skip_preprocess=value` → `CM_SKIP_PREPROCESS_DATASET=value` +* `--skip_preprocessing=value` → `CM_SKIP_PREPROCESS_DATASET=value` +* `--target_latency=value` → `CM_MLPERF_LOADGEN_TARGET_LATENCY=value` +* `--target_qps=value` → `CM_MLPERF_LOADGEN_TARGET_QPS=value` +* `--user_conf=value` → `CM_MLPERF_USER_CONF=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "count":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_BATCH_COUNT: `1` +* CM_BATCH_SIZE: `1` +* CM_FAST_COMPILATION: `yes` +* CM_MLPERF_LOADGEN_SCENARIO: `Offline` +* CM_MLPERF_LOADGEN_MODE: `performance` +* CM_SKIP_PREPROCESS_DATASET: `no` +* CM_SKIP_MODEL_DOWNLOAD: `no` +* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `kilt` +* CM_MLPERF_SKIP_RUN: `no` +* CM_KILT_REPO_URL: `https://github.com/GATEOverflow/kilt-mlperf` +* CM_QAIC_DEVICES: `0` +* kilt_max_wait_abs: `10000` +* verbosity: `0` +* loadgen_trigger_cold_run: `0` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-qualcomm/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,git,repo + * CM names: `--adr.['kilt-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,mlcommons,inference,loadgen + * CM names: `--adr.['inference-loadgen']...` + - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) + * generate,user-conf,mlperf,inference + * CM names: `--adr.['user-conf-generator']...` + - CM script: [generate-mlperf-inference-user-conf](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-user-conf) + * get,generic-python-lib,_mlperf_logging + * CM names: `--adr.['mlperf-logging']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,ml-model,resnet50,_fp32,_onnx,_from-tf + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_MLPERF_DEVICE': ['qaic']}` + * CM names: `--adr.['resnet50-model', 'ml-model']...` + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + * compile,qaic,model,_resnet50 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50'], 'CM_MLPERF_DEVICE': ['qaic']}` + * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_MLPERF_SKIP_RUN': [True]}` + * CM names: `--adr.['qaic-model-compiler', 'resnet50-compiler']...` + - CM script: [compile-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-model-for.qaic) + * get,dataset,imagenet,preprocessed,_for.resnet50,_NHWC,_full + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + * Skip this dependency only if all ENV vars are set:<br>
+`{'CM_MLPERF_SKIP_RUN': [True]}` + * CM names: `--adr.['imagenet-preprocessed', 'dataset-preprocessed']...` + - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) + * get,squad-vocab + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['bert-99', 'bert-99.9']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': [True]}` + * CM names: `--adr.['bert-vocab']...` + - CM script: [get-dataset-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad-vocab) + * get,dataset,tokenized,squad,_raw + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['bert-99', 'bert-99.9']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': [True]}` + * CM names: `--adr.['squad-tokenized']...` + - CM script: [get-preprocessed-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-squad) + * compile,qaic,model,_retinanet + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet'], 'CM_MLPERF_DEVICE': ['qaic']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': [True]}` + * CM names: `--adr.['qaic-model-compiler', 'retinanet-compiler']...` + - CM script: [compile-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-model-for.qaic) + * get,dataset,preprocessed,openimages,_for.retinanet.onnx,_NCHW,_validation,_custom-annotations + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': [True]}` + * CM names: `--adr.['openimages-preprocessed', 'dataset-preprocessed']...` + - CM script: [get-preprocessed-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openimages) + * get,lib,onnxruntime,lang-cpp,_cpu + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['onnxruntime'], 'CM_MLPERF_DEVICE': ['cpu']}` + - CM script: [get-onnxruntime-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-onnxruntime-prebuilt) + * get,lib,onnxruntime,lang-cpp,_cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['onnxruntime'], 'CM_MLPERF_DEVICE': ['gpu']}` + - CM script: [get-onnxruntime-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-onnxruntime-prebuilt) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-qualcomm/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-qualcomm/_cm.yaml) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-qualcomm/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-qualcomm/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-qualcomm/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-qualcomm/_cm.yaml)*** + * compile,cpp-program + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': [True]}` + * CM names: `--adr.['compile-program']...` + - CM script: [compile-program](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-program) + * benchmark-mlperf + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': ['yes', True]}` + * CM names: `--adr.['runner', 'mlperf-runner']...` + - CM script: [benchmark-program-mlperf](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program-mlperf) + * save,mlperf,inference,state + * CM names: `--adr.['save-mlperf-inference-state']...` + - CM script: [save-mlperf-inference-implementation-state](https://github.com/mlcommons/cm4mlops/tree/master/script/save-mlperf-inference-implementation-state) + +___ +### Script output +`cmr "reproduce mlcommons mlperf inference harness qualcomm-harness qualcomm kilt-harness kilt [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +* `CM_HW_NAME` +* `CM_IMAGENET_ACCURACY_DTYPE` +* `CM_MAX_EXAMPLES` +* `CM_MLPERF_*` +* `CM_ML_MODEL_*` +* `CM_SQUAD_ACCURACY_DTYPE` +#### New environment keys auto-detected from customize + +* `CM_DATASET_LIST` +* `CM_MLPERF_CONF` +* `CM_MLPERF_DEVICE` +* `CM_MLPERF_USER_CONF` \ No newline at end of file diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/app-loadgen-generic-python.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-loadgen-generic-python.md new file mode 100644 index 000000000..d4b87036e --- /dev/null +++ b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-loadgen-generic-python.md @@ -0,0 +1,331 @@ +Automatically generated README for this automation recipe: **app-loadgen-generic-python** + +Category: **Modular MLPerf inference benchmark pipeline** + +License: **Apache 2.0** + +Developers: [Gaz Iqbal](https://www.linkedin.com/in/gaziqbal), [Arjun Suresh](https://www.linkedin.com/in/arjunsuresh), [Grigori Fursin](https://cKnowledge.org/gfursin) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-loadgen-generic-python,d3d949cc361747a6) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *python,app,generic,loadgen* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "python app generic loadgen" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=python,app,generic,loadgen` + +`cm run script --tags=python,app,generic,loadgen[,variations] [--input_flags]` + +*or* + +`cmr "python app generic loadgen"` + +`cmr "python app generic loadgen [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + + +#### Input Flags + +* --**modelpath**=Full path to file with model weights +* --**modelcodepath**=(for PyTorch models) Full path to file with model code and cmc.py +* --**modelcfgpath**=(for PyTorch models) Full path to JSON file with model cfg +* --**modelsamplepath**=(for PyTorch models) Full path to file with model sample in pickle format +* --**ep**=ONNX Execution provider +* --**scenario**=MLPerf LoadGen scenario +* --**samples**=Number of samples (*2*) +* --**runner**=MLPerf runner +* 
--**execmode**=MLPerf exec mode +* --**output_dir**=MLPerf output directory +* --**concurrency**=MLPerf concurrency +* --**intraop**=MLPerf intra op threads +* --**interop**=MLPerf inter op threads + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "modelpath":...} +``` +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'python,app,generic,loadgen' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="python,app,generic,loadgen"``` + +#### Run this script via Docker (beta) + +`cm docker script "python app generic loadgen[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_cmc` + - Environment variables: + - *CM_CUSTOM_MODEL_CMC*: `True` + - Workflow: + * `_custom,cmc` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,ml-model,cmc + - CM script: [get-ml-model-abtf-ssd-pytorch](https://github.com/mlcommons/cm4abtf/tree/master/script/get-ml-model-abtf-ssd-pytorch) + * `_custom,huggingface` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,ml-model,huggingface + * CM names: `--adr.['hf-downloader']...` + - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo) + * `_huggingface` + - Environment variables: + - *CM_CUSTOM_MODEL_SOURCE*: `huggingface` + - Workflow: + * `_model-stub.#` + - Environment variables: + - *CM_ML_MODEL_STUB*: `#` + - Workflow: + +
+ + + * Group "**backend**" +
+ Click here to expand this section. + + * **`_onnxruntime`** (default) + - Environment variables: + - *CM_MLPERF_BACKEND*: `onnxruntime` + - Workflow: + * `_pytorch` + - Environment variables: + - *CM_MLPERF_BACKEND*: `pytorch` + - Workflow: + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *CM_MLPERF_DEVICE*: `cpu` + - *CM_MLPERF_EXECUTION_PROVIDER*: `CPUExecutionProvider` + - Workflow: + * `_cuda` + - Environment variables: + - *CM_MLPERF_DEVICE*: `gpu` + - *CM_MLPERF_EXECUTION_PROVIDER*: `CUDAExecutionProvider` + - Workflow: + +
+ + + * Group "**models**" +
+ Click here to expand this section. + + * `_custom` + - Environment variables: + - *CM_MODEL*: `custom` + - Workflow: + * `_resnet50` + - Environment variables: + - *CM_MODEL*: `resnet50` + - Workflow: + * `_retinanet` + - Environment variables: + - *CM_MODEL*: `retinanet` + - Workflow: + +
+ + +#### Default variations + +`_cpu,_onnxruntime` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--concurrency=value` → `CM_MLPERF_CONCURRENCY=value` +* `--ep=value` → `CM_MLPERF_EXECUTION_PROVIDER=value` +* `--execmode=value` → `CM_MLPERF_EXEC_MODE=value` +* `--interop=value` → `CM_MLPERF_INTEROP=value` +* `--intraop=value` → `CM_MLPERF_INTRAOP=value` +* `--loadgen_duration_sec=value` → `CM_MLPERF_LOADGEN_DURATION_SEC=value` +* `--loadgen_expected_qps=value` → `CM_MLPERF_LOADGEN_EXPECTED_QPS=value` +* `--modelcfg=value` → `CM_ML_MODEL_CFG=value` +* `--modelcfgpath=value` → `CM_ML_MODEL_CFG_WITH_PATH=value` +* `--modelcodepath=value` → `CM_ML_MODEL_CODE_WITH_PATH=value` +* `--modelpath=value` → `CM_ML_MODEL_FILE_WITH_PATH=value` +* `--modelsamplepath=value` → `CM_ML_MODEL_SAMPLE_WITH_PATH=value` +* `--output_dir=value` → `CM_MLPERF_OUTPUT_DIR=value` +* `--runner=value` → `CM_MLPERF_RUNNER=value` +* `--samples=value` → `CM_MLPERF_LOADGEN_SAMPLES=value` +* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "concurrency":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_EXECUTION_MODE: `parallel` +* CM_MLPERF_BACKEND: `onnxruntime` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_psutil + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_DEVICE': ['gpu']}` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,loadgen + * CM names: `--adr.['loadgen']...` + - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) + * get,generic-python-lib,_onnxruntime + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['onnxruntime'], 'CM_MLPERF_DEVICE': ['cpu']}` + * CM names: `--adr.['onnxruntime']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnxruntime_gpu + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['onnxruntime'], 'CM_MLPERF_DEVICE': ['gpu']}` + * CM names: `--adr.['onnxruntime']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnx + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['onnxruntime']}` + * CM names: `--adr.['onnx']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['pytorch'], 'CM_MLPERF_DEVICE': ['cpu']}` + * CM names: `--adr.['torch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchvision + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['pytorch'], 'CM_MLPERF_DEVICE': ['cpu']}` + * CM names: `--adr.['torchvision']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch_cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['pytorch'], 'CM_MLPERF_DEVICE': ['gpu']}` + * CM names: `--adr.['torch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchvision_cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['pytorch'], 'CM_MLPERF_DEVICE': ['gpu']}` + * CM names: `--adr.['torchvision']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,ml-model,resnet50,_onnx + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + * get,ml-model,retinanet,_onnx,_fp32 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) + * get,ml-model,retinanet,_onnx,_fp32 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python/_cm.yaml) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python/_cm.yaml) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python/_cm.yaml) + +___ +### Script output +`cmr "python app generic loadgen [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_MLPERF_*` +#### New environment keys auto-detected from customize diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-ctuning-cpp-tflite.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-ctuning-cpp-tflite.md new file mode 100644 index 000000000..dce3fd1b7 --- /dev/null +++ b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-ctuning-cpp-tflite.md @@ -0,0 +1,382 @@ +Automatically generated README for this automation recipe: **app-mlperf-inference-ctuning-cpp-tflite** + +Category: **Modular MLPerf inference benchmark pipeline** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-inference-ctuning-cpp-tflite,415904407cca404a) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-ctuning-cpp-tflite)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *app,mlperf,inference,tflite-cpp* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "app mlperf inference tflite-cpp" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=app,mlperf,inference,tflite-cpp` + +`cm run script --tags=app,mlperf,inference,tflite-cpp[,variations] [--input_flags]` + +*or* + +`cmr "app mlperf inference tflite-cpp"` + +`cmr "app mlperf inference tflite-cpp [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'app,mlperf,inference,tflite-cpp' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="app,mlperf,inference,tflite-cpp"``` + +#### Run this script via Docker (beta) + +`cm docker script "app mlperf inference tflite-cpp[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_armnn` + - Environment variables: + - *CM_MLPERF_TFLITE_USE_ARMNN*: `yes` + - *CM_TMP_LINK_LIBS*: `tensorflowlite,armnn` + - Workflow: + * `_armnn,tflite` + - Environment variables: + - *CM_MLPERF_BACKEND*: `armnn_tflite` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - *CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX*: `tflite_armnn_cpp` + - *CM_TMP_LINK_LIBS*: `tensorflowlite,armnn,armnnTfLiteParser` + - *CM_TMP_SRC_FOLDER*: `armnn` + - Workflow: + +
+ + + * Group "**backend**" +
+ Click here to expand this section. + + * `_tf` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tf` + - Workflow: + * **`_tflite`** (default) + - Environment variables: + - *CM_MLPERF_BACKEND*: `tflite` + - *CM_MLPERF_BACKEND_VERSION*: `master` + - *CM_TMP_LINK_LIBS*: `tensorflowlite` + - *CM_TMP_SRC_FOLDER*: `src` + - Workflow: + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *CM_MLPERF_DEVICE*: `cpu` + - Workflow: + * `_gpu` + - Environment variables: + - *CM_MLPERF_DEVICE*: `gpu` + - *CM_MLPERF_DEVICE_LIB_NAMESPEC*: `cudart` + - Workflow: + +
+ + + * Group "**loadgen-scenario**" +
+ Click here to expand this section. + + * **`_singlestream`** (default) + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `SingleStream` + - Workflow: + +
+ + + * Group "**model**" +
+ Click here to expand this section. + + * `_efficientnet` + - Environment variables: + - *CM_MODEL*: `efficientnet` + - Workflow: + * `_mobilenet` + - Environment variables: + - *CM_MODEL*: `mobilenet` + - Workflow: + * **`_resnet50`** (default) + - Environment variables: + - *CM_MODEL*: `resnet50` + - Workflow: + +
+ + + * Group "**optimization-target**" +
+ Click here to expand this section. + + * `_use-neon` + - Environment variables: + - *CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX1*: `using_neon` + - *CM_MLPERF_TFLITE_USE_NEON*: `1` + - Workflow: + * `_use-opencl` + - Environment variables: + - *CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX1*: `using_opencl` + - *CM_MLPERF_TFLITE_USE_OPENCL*: `1` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * **`_fp32`** (default) + - Environment variables: + - *CM_MLPERF_MODEL_PRECISION*: `float32` + - Workflow: + * `_int8` + - Environment variables: + - *CM_DATASET_COMPRESSED*: `on` + - *CM_MLPERF_MODEL_PRECISION*: `int8` + - Workflow: + * `_uint8` + - Environment variables: + - *CM_DATASET_COMPRESSED*: `on` + - *CM_MLPERF_MODEL_PRECISION*: `uint8` + - Workflow: + +
+ + +#### Default variations + +`_cpu,_fp32,_resnet50,_singlestream,_tflite` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--compressed_dataset=value` → `CM_DATASET_COMPRESSED=value` +* `--count=value` → `CM_MLPERF_LOADGEN_QUERY_COUNT=value` +* `--mlperf_conf=value` → `CM_MLPERF_CONF=value` +* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` +* `--output_dir=value` → `CM_MLPERF_OUTPUT_DIR=value` +* `--performance_sample_count=value` → `CM_MLPERF_LOADGEN_PERFORMANCE_SAMPLE_COUNT=value` +* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` +* `--user_conf=value` → `CM_MLPERF_USER_CONF=value` +* `--verbose=value` → `CM_VERBOSE=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "compressed_dataset":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DATASET_COMPRESSED: `off` +* CM_DATASET_INPUT_SQUARE_SIDE: `224` +* CM_FAST_COMPILATION: `yes` +* CM_LOADGEN_BUFFER_SIZE: `1024` +* CM_MLPERF_LOADGEN_MODE: `accuracy` +* CM_MLPERF_LOADGEN_SCENARIO: `SingleStream` +* CM_MLPERF_LOADGEN_TRIGGER_COLD_RUN: `0` +* CM_MLPERF_OUTPUT_DIR: `.` +* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `tflite_cpp` +* CM_MLPERF_TFLITE_USE_NEON: `0` +* CM_MLPERF_TFLITE_USE_OPENCL: `0` +* CM_ML_MODEL_GIVEN_CHANNEL_MEANS: `123.68 116.78 103.94` +* CM_ML_MODEL_NORMALIZE_DATA: `0` +* CM_ML_MODEL_SUBTRACT_MEANS: `1` +* CM_VERBOSE: `0` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-ctuning-cpp-tflite/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_DEVICE': ['gpu']}` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,loadgen + * CM names: `--adr.['loadgen']...` + - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,ml-model,mobilenet,raw,_tflite + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['tflite', 'armnn_tflite'], 'CM_MODEL': ['mobilenet']}` + * CM names: `--adr.['ml-model', 'tflite-model', 'mobilenet-model']...` + - CM script: [get-ml-model-mobilenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-mobilenet) + * get,ml-model,resnet50,raw,_tflite,_no-argmax + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['tflite', 'armnn_tflite'], 'CM_MODEL': ['resnet50']}` + * CM names: `--adr.['ml-model', 'tflite-model', 'resnet50-model']...` + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + * get,ml-model,resnet50,raw,_tf + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['tf'], 'CM_MODEL': ['resnet50']}` + * CM names: `--adr.['ml-model', 'tflite-model', 'resnet50-model']...` + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + * get,ml-model,efficientnet,raw,_tflite + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['tflite', 'armnn_tflite'], 'CM_MODEL': ['efficientnet']}` + * CM names: `--adr.['ml-model', 'tflite-model', 'efficientnet-model']...` + - CM script: [get-ml-model-efficientnet-lite](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-efficientnet-lite) + * get,tensorflow,lib,_tflite + - CM script: [install-tensorflow-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-tensorflow-from-src) + * get,lib,armnn + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_TFLITE_USE_ARMNN': ['yes']}` + * CM names: `--adr.['armnn', 'lib-armnn']...` + - CM script: [get-lib-armnn](https://github.com/mlcommons/cm4mlops/tree/master/script/get-lib-armnn) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-ctuning-cpp-tflite/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-ctuning-cpp-tflite/_cm.json)*** + * generate,user-conf,mlperf,inference + * CM names: `--adr.['user-conf-generator']...` + - CM script: [generate-mlperf-inference-user-conf](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-user-conf) + * get,dataset,preprocessed,imagenet,_for.resnet50,_rgb32,_NHWC + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': ['no'], 'CM_MODEL': ['resnet50']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_DATASET_COMPRESSED': ['on']}` + * CM names: `--adr.['imagenet-preprocessed', 'preprocessed-dataset']...` + - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) + * get,dataset,preprocessed,imagenet,_for.mobilenet,_rgb32,_NHWC + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': ['no'], 'CM_MODEL': ['mobilenet', 'efficientnet']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_DATASET_COMPRESSED': ['on']}` + * CM names: `--adr.['imagenet-preprocessed', 'preprocessed-dataset']...` + - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) + * get,dataset,preprocessed,imagenet,_for.mobilenet,_rgb8,_NHWC + * Enable this dependency only if all ENV vars are set:
+`{'CM_DATASET_COMPRESSED': ['on'], 'CM_MLPERF_SKIP_RUN': ['no'], 'CM_MODEL': ['mobilenet', 'efficientnet']}` + * CM names: `--adr.['imagenet-preprocessed', 'preprocessed-dataset']...` + - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) + * get,dataset,preprocessed,imagenet,_for.resnet50,_rgb8,_NHWC + * Enable this dependency only if all ENV vars are set:
+`{'CM_DATASET_COMPRESSED': ['on'], 'CM_MLPERF_SKIP_RUN': ['no'], 'CM_MODEL': ['resnet50']}` + * CM names: `--adr.['imagenet-preprocessed', 'preprocessed-dataset']...` + - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-ctuning-cpp-tflite/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-ctuning-cpp-tflite/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-ctuning-cpp-tflite/_cm.json)*** + * compile,program + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': ['yes']}` + * CM names: `--adr.['compiler-program']...` + - CM script: [compile-program](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-program) + * benchmark-mlperf + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': ['yes']}` + * CM names: `--adr.['mlperf-runner']...` + - CM script: [benchmark-program-mlperf](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program-mlperf) + * save,mlperf,inference,state + * CM names: `--adr.['save-mlperf-inference-state']...` + - CM script: [save-mlperf-inference-implementation-state](https://github.com/mlcommons/cm4mlops/tree/master/script/save-mlperf-inference-implementation-state) + +___ +### Script output +`cmr "app mlperf inference tflite-cpp [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_HW_NAME` +* `CM_MLPERF_*` +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_CONF` +* `CM_MLPERF_DEVICE` +* `CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX2` +* `CM_MLPERF_USER_CONF` \ No newline at end of file diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-cpp.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-cpp.md new file mode 100644 index 000000000..35b59a51b --- /dev/null +++ b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-cpp.md @@ -0,0 +1,336 @@ +Automatically generated README for this automation recipe: **app-mlperf-inference-mlcommons-cpp** + +Category: **Modular MLPerf inference benchmark pipeline** + +License: **Apache 2.0** + +Developers: [Thomas Zhu](https://www.linkedin.com/in/hanwen-zhu-483614189), [Arjun Suresh](https://www.linkedin.com/in/arjunsuresh), [Grigori Fursin](https://cKnowledge.org/gfursin) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-inference-mlcommons-cpp,bf62405e6c7a44bf) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-cpp)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *app,mlcommons,mlperf,inference,cpp* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "app mlcommons mlperf inference cpp" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=app,mlcommons,mlperf,inference,cpp` + +`cm run script --tags=app,mlcommons,mlperf,inference,cpp[,variations] [--input_flags]` + +*or* + +`cmr "app mlcommons mlperf inference cpp"` + +`cmr "app mlcommons mlperf inference cpp [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'app,mlcommons,mlperf,inference,cpp' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="app,mlcommons,mlperf,inference,cpp"``` + +#### Run this script via Docker (beta) + +`cm docker script "app mlcommons mlperf inference cpp[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_multistream,resnet50` + - Workflow: + * `_multistream,retinanet` + - Workflow: + * `_offline,resnet50` + - Workflow: + * `_resnet50,multistream` + - Workflow: + * `_resnet50,offline` + - Workflow: + * `_resnet50,server` + - Workflow: + +
+ + + * Group "**batch-size**" +
+ Click here to expand this section. + + * `_batch-size.#` + - Environment variables: + - *CM_MLPERF_LOADGEN_MAX_BATCHSIZE*: `#` + - Workflow: + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *CM_MLPERF_DEVICE*: `cpu` + - Workflow: + * `_cuda` + - Environment variables: + - *CM_MLPERF_DEVICE*: `gpu` + - *CM_MLPERF_DEVICE_LIB_NAMESPEC*: `cudart` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * **`_onnxruntime`** (default) + - Environment variables: + - *CM_MLPERF_BACKEND*: `onnxruntime` + - *CM_MLPERF_BACKEND_LIB_NAMESPEC*: `onnxruntime` + - Workflow: + * `_pytorch` + - Environment variables: + - *CM_MLPERF_BACKEND*: `pytorch` + - Workflow: + * `_tf` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tf` + - Workflow: + * `_tflite` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tflite` + - Workflow: + * `_tvm-onnx` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tvm-onnx` + - Workflow: + +
+ + + * Group "**loadgen-scenario**" +
+ Click here to expand this section. + + * `_multistream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `MultiStream` + - Workflow: + * **`_offline`** (default) + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Offline` + - Workflow: + * `_server` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Server` + - Workflow: + * `_singlestream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `SingleStream` + - *CM_MLPERF_LOADGEN_MAX_BATCHSIZE*: `1` + - Workflow: + +
+ + + * Group "**model**" +
+ Click here to expand this section. + + * **`_resnet50`** (default) + - Environment variables: + - *CM_MODEL*: `resnet50` + - Workflow: + * `_retinanet` + - Environment variables: + - *CM_MODEL*: `retinanet` + - Workflow: + +
+ + +#### Default variations + +`_cpu,_offline,_onnxruntime,_resnet50` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--count=value` → `CM_MLPERF_LOADGEN_QUERY_COUNT=value` +* `--max_batchsize=value` → `CM_MLPERF_LOADGEN_MAX_BATCHSIZE=value` +* `--mlperf_conf=value` → `CM_MLPERF_CONF=value` +* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` +* `--output_dir=value` → `CM_MLPERF_OUTPUT_DIR=value` +* `--performance_sample_count=value` → `CM_MLPERF_LOADGEN_PERFORMANCE_SAMPLE_COUNT=value` +* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` +* `--user_conf=value` → `CM_MLPERF_USER_CONF=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "count":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_BATCH_COUNT: `1` +* CM_BATCH_SIZE: `1` +* CM_FAST_COMPILATION: `yes` +* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `cpp` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-cpp/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,cuda,_cudnn + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_DEVICE': ['gpu']}` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,loadgen + * CM names: `--adr.['loadgen']...` + - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,lib,onnxruntime,lang-cpp,_cpu + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['onnxruntime'], 'CM_MLPERF_DEVICE': ['cpu']}` + - CM script: [get-onnxruntime-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-onnxruntime-prebuilt) + * get,lib,onnxruntime,lang-cpp,_cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['onnxruntime'], 'CM_MLPERF_DEVICE': ['gpu']}` + - CM script: [get-onnxruntime-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-onnxruntime-prebuilt) + * get,dataset,preprocessed,imagenet,_NCHW + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + * CM names: `--adr.['imagenet-preprocessed']...` + - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) + * get,ml-model,raw,resnet50,_onnx + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + * get,dataset,preprocessed,openimages,_validation,_NCHW + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + * CM names: `--adr.['openimages-preprocessed']...` + - CM script: [get-preprocessed-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openimages) + * get,ml-model,retinanet,_onnx,_fp32 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) + * generate,user-conf,mlperf,inference + * CM names: `--adr.['user-conf-generator']...` + - CM script: [generate-mlperf-inference-user-conf](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-user-conf) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-cpp/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-cpp/_cm.yaml) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-cpp/_cm.yaml) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-cpp/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-cpp/_cm.yaml)*** + * compile,cpp-program + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': ['yes']}` + * CM names: `--adr.['compile-program']...` + - CM script: [compile-program](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-program) + * benchmark-mlperf + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': ['yes']}` + * CM names: `--adr.['mlperf-runner']...` + - CM script: [benchmark-program-mlperf](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program-mlperf) + * save,mlperf,inference,state + * CM names: `--adr.['save-mlperf-inference-state']...` + - CM script: [save-mlperf-inference-implementation-state](https://github.com/mlcommons/cm4mlops/tree/master/script/save-mlperf-inference-implementation-state) + +___ +### Script output +`cmr "app mlcommons mlperf inference cpp [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +* `CM_HW_NAME` +* `CM_MLPERF_*` +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize + +* `CM_DATASET_LIST` +* `CM_MLPERF_CONF` +* `CM_MLPERF_DEVICE` +* `CM_MLPERF_USER_CONF` \ No newline at end of file diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-python.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-python.md new file mode 100644 index 000000000..d8e825fc8 --- /dev/null +++ b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-python.md @@ -0,0 +1,944 @@ +Automatically generated README for this automation recipe: **app-mlperf-inference-mlcommons-python** + +Category: **Modular MLPerf inference benchmark pipeline** + +License: **Apache 2.0** + +Developers: [Arjun Suresh](https://www.linkedin.com/in/arjunsuresh), [Thomas Zhu](https://www.linkedin.com/in/hanwen-zhu-483614189), [Grigori Fursin](https://cKnowledge.org/gfursin) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-inference-mlcommons-python,ff149e9781fc4b65) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- + +This portable CM script is being developed by the [MLCommons taskforce on automation and 
reproducibility](https://github.com/mlcommons/ck/blob/master/docs/mlperf-education-workgroup.md) +to modularize the *python reference implementations* of the [MLPerf inference benchmark](https://github.com/mlcommons/inference) +using the [MLCommons CM automation meta-framework](https://github.com/mlcommons/ck). +The goal is to make it easier to run, optimize and reproduce MLPerf benchmarks +across diverse platforms with continuously changing software and hardware. + +See the current coverage of different models, devices and backends [here](README-extra.md#current-coverage). + + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-python)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *app,vision,language,mlcommons,mlperf,inference,reference,ref* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "app vision language mlcommons mlperf inference reference ref" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=app,vision,language,mlcommons,mlperf,inference,reference,ref` + +`cm run script --tags=app,vision,language,mlcommons,mlperf,inference,reference,ref[,variations] [--input_flags]` + +*or* + +`cmr "app vision language mlcommons mlperf inference reference ref"` + +`cmr "app vision language mlcommons mlperf inference reference ref [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'app,vision,language,mlcommons,mlperf,inference,reference,ref' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="app,vision,language,mlcommons,mlperf,inference,reference,ref"``` + +#### Run this script via Docker (beta) + +`cm docker script "app vision language mlcommons mlperf inference reference ref[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *Internal group (variations should not be selected manually)* +
+ Click here to expand this section. + + * `_gptj_` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_torch + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.datasets + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.attrs + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.accelerate + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_llama2-70b_` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_package.transformers + * CM names: `--adr.['transformers']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.datasets + * CM names: `--adr.['datasets']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.sentencepiece + * CM names: `--adr.['sentencepiece']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.protobuf + * CM names: `--adr.['protobuf']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.accelerate + * CM names: `--adr.['accelerate']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.absl-py + * CM names: `--adr.['absl-py']...` + - CM script: 
[get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.evaluate + * CM names: `--adr.['evaluate']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.nltk + * CM names: `--adr.['nltk']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.rouge-score + * CM names: `--adr.['rouge-score']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_3d-unet` + - Environment variables: + - *CM_TMP_IGNORE_MLPERF_QUERY_COUNT*: `True` + - *CM_MLPERF_MODEL_SKIP_BATCHING*: `True` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_package.nibabel + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_beam_size.#` + - Environment variables: + - *GPTJ_BEAM_SIZE*: `#` + - Workflow: + * `_bert` + - Environment variables: + - *CM_MLPERF_MODEL_SKIP_BATCHING*: `True` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_package.pydantic + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_tokenization + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_six + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.absl-py + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_protobuf + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['tf', 'tflite']}` + * CM names: `--adr.['protobuf']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_boto3 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['pytorch']}` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_DEVICE': ['gpu']}` + * CM names: `--adr.['ml-engine-pytorch', 'pytorch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_dlrm` + - Environment variables: + - *CM_MLPERF_MODEL_SKIP_BATCHING*: `True` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,dlrm,src + * CM names: `--adr.['dlrm-src']...` + - CM script: [get-dlrm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dlrm) + * get,generic-python-lib,_mlperf_logging + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_opencv-python + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_tensorboard + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_protobuf + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_scikit-learn + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_tqdm + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnx + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.torchrec + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.pyre-extensions + - CM 
script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.torchsnapshot + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_llama2-70b_,cuda` + - Workflow: + * `_multistream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `MultiStream` + - Workflow: + * `_offline` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Offline` + - Workflow: + * `_onnxruntime,cpu` + - Environment variables: + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - Workflow: + * `_onnxruntime,cuda` + - Environment variables: + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - *ONNXRUNTIME_PREFERRED_EXECUTION_PROVIDER*: `CUDAExecutionProvider` + - Workflow: + * `_onnxruntime,rocm` + - Environment variables: + - *ONNXRUNTIME_PREFERRED_EXECUTION_PROVIDER*: `ROCMExecutionProvider` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - Workflow: + * `_pytorch,rocm` + - Workflow: + * `_r2.1_default` + - Environment variables: + - *CM_RERUN*: `yes` + - *CM_SKIP_SYS_UTILS*: `yes` + - *CM_TEST_QUERY_COUNT*: `100` + - Workflow: + * `_server` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Server` + - Workflow: + * `_singlestream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `SingleStream` + - Workflow: + * `_tf,rocm` + - Environment variables: + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - Workflow: + * `_tpu,tflite` + - Workflow: + +
+ + + * Group "**batch-size**" +
+ Click here to expand this section. + + * `_batch_size.#` + - Environment variables: + - *CM_MLPERF_LOADGEN_MAX_BATCHSIZE*: `#` + - Workflow: + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *CM_MLPERF_DEVICE*: `cpu` + - *CUDA_VISIBLE_DEVICES*: `` + - *USE_CUDA*: `False` + - *USE_GPU*: `False` + - Workflow: + * `_cuda` + - Environment variables: + - *CM_MLPERF_DEVICE*: `gpu` + - *USE_CUDA*: `True` + - *USE_GPU*: `True` + - Workflow: + * `_rocm` + - Environment variables: + - *CM_MLPERF_DEVICE*: `rocm` + - *USE_GPU*: `True` + - Workflow: + * `_tpu` + - Environment variables: + - *CM_MLPERF_DEVICE*: `tpu` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * `_deepsparse` + - Environment variables: + - *CM_MLPERF_BACKEND*: `deepsparse` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_deepsparse + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_HOST_PLATFORM_FLAVOR': ['aarch64']}` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.deepsparse-nightly + * Enable this dependency only if all ENV vars are set:
+`{'CM_HOST_PLATFORM_FLAVOR': ['aarch64']}` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_ncnn` + - Environment variables: + - *CM_MLPERF_BACKEND*: `ncnn` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - *CM_MLPERF_VISION_DATASET_OPTION*: `imagenet_pytorch` + - Workflow: + * **`_onnxruntime`** (default) + - Environment variables: + - *CM_MLPERF_BACKEND*: `onnxruntime` + - Workflow: + * `_pytorch` + - Environment variables: + - *CM_MLPERF_BACKEND*: `pytorch` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - Workflow: + * `_ray` + - Environment variables: + - *CM_MLPERF_BACKEND*: `ray` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - Workflow: + * `_tf` + - Aliases: `_tensorflow` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tf` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - Workflow: + * `_tflite` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tflite` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - *CM_MLPERF_VISION_DATASET_OPTION*: `imagenet_tflite_tpu` + - Workflow: + * `_tvm-onnx` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tvm-onnx` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_onnx + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,tvm + * CM names: `--adr.['tvm']...` + - CM script: [get-tvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm) + * get,tvm-model,_onnx + * CM names: `--adr.['tvm-model']...` + - CM script: [get-tvm-model](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm-model) + * `_tvm-pytorch` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tvm-pytorch` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - *CM_PREPROCESS_PYTORCH*: `yes` + - *MLPERF_TVM_TORCH_QUANTIZED_ENGINE*: `qnnpack` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,generic-python-lib,_torch + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,tvm + * CM names: `--adr.['tvm']...` + - CM script: [get-tvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm) + * get,tvm-model,_pytorch + * CM names: `--adr.['tvm-model']...` + - CM script: [get-tvm-model](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm-model) + * `_tvm-tflite` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tvm-tflite` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_tflite + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,tvm + * CM names: `--adr.['tvm']...` + - CM script: [get-tvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm) + * get,tvm-model,_tflite + * CM names: `--adr.['tvm-model']...` + - CM script: [get-tvm-model](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm-model) + +
+ + + * Group "**implementation**" +
+ Click here to expand this section. + + * **`_python`** (default) + - Environment variables: + - *CM_MLPERF_PYTHON*: `yes` + - *CM_MLPERF_IMPLEMENTATION*: `reference` + - Workflow: + +
+ + + * Group "**models**" +
+ Click here to expand this section. + + * `_3d-unet-99` + - Environment variables: + - *CM_MODEL*: `3d-unet-99` + - Workflow: + * `_3d-unet-99.9` + - Environment variables: + - *CM_MODEL*: `3d-unet-99.9` + - Workflow: + * `_bert-99` + - Environment variables: + - *CM_MODEL*: `bert-99` + - Workflow: + * `_bert-99.9` + - Environment variables: + - *CM_MODEL*: `bert-99.9` + - Workflow: + * `_dlrm-99` + - Environment variables: + - *CM_MODEL*: `dlrm-99` + - Workflow: + * `_dlrm-99.9` + - Environment variables: + - *CM_MODEL*: `dlrm-99.9` + - Workflow: + * `_gptj-99` + - Environment variables: + - *CM_MODEL*: `gptj-99` + - Workflow: + * `_gptj-99.9` + - Environment variables: + - *CM_MODEL*: `gptj-99.9` + - Workflow: + * `_llama2-70b-99` + - Environment variables: + - *CM_MODEL*: `llama2-70b-99` + - Workflow: + * `_llama2-70b-99.9` + - Environment variables: + - *CM_MODEL*: `llama2-70b-99.9` + - Workflow: + * **`_resnet50`** (default) + - Environment variables: + - *CM_MODEL*: `resnet50` + - *CM_MLPERF_USE_MLCOMMONS_RUN_SCRIPT*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_opencv-python + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pycocotools + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Read "prehook_deps" on other CM scripts*** + * get,generic-python-lib,_protobuf + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['tf', 'tflite']}` + * CM names: `--adr.['protobuf']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_retinanet` + - Environment variables: + - *CM_MODEL*: `retinanet` + - *CM_MLPERF_USE_MLCOMMONS_RUN_SCRIPT*: `yes` + - *CM_MLPERF_LOADGEN_MAX_BATCHSIZE*: `1` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_opencv-python + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pycocotools + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_rnnt` + - Environment variables: + - *CM_MODEL*: `rnnt` + - *CM_MLPERF_MODEL_SKIP_BATCHING*: `True` + - *CM_TMP_IGNORE_MLPERF_QUERY_COUNT*: `True` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,generic-python-lib,_package.pydantic + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_librosa + * CM names: `--adr.['librosa']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_inflect + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_unidecode + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_toml + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_sdxl` + - Environment variables: + - *CM_MODEL*: `stable-diffusion-xl` + - *CM_NUM_THREADS*: `1` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_package.diffusers + * CM names: `--adr.['diffusers']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.transformers + * CM names: `--adr.['transformers']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.accelerate + * CM names: `--adr.['accelerate']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.torchmetrics + * CM names: `--adr.['torchmetrics']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.torch-fidelity + * CM names: `--adr.['torch-fidelity']...` + - CM script: 
[get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.open_clip_torch + * CM names: `--adr.['open-clip']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.opencv-python + * CM names: `--adr.['opencv-python']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.scipy + * CM names: `--adr.['scipy']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + + * Group "**network**" +
+ Click here to expand this section. + + * `_network-lon` + - Environment variables: + - *CM_NETWORK_LOADGEN*: `lon` + - *CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX1*: `network_loadgen` + - Workflow: + * `_network-sut` + - Environment variables: + - *CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX1*: `network_sut` + - *CM_NETWORK_LOADGEN*: `sut` + - Workflow: + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * `_bfloat16` + - Environment variables: + - *CM_MLPERF_QUANTIZATION*: `False` + - *CM_MLPERF_MODEL_PRECISION*: `bfloat16` + - Workflow: + * `_float16` + - Environment variables: + - *CM_MLPERF_QUANTIZATION*: `False` + - *CM_MLPERF_MODEL_PRECISION*: `float16` + - Workflow: + * **`_fp32`** (default) + - Environment variables: + - *CM_MLPERF_QUANTIZATION*: `False` + - *CM_MLPERF_MODEL_PRECISION*: `float32` + - Workflow: + * `_int8` + - Aliases: `_quantized` + - Environment variables: + - *CM_MLPERF_QUANTIZATION*: `True` + - *CM_MLPERF_MODEL_PRECISION*: `int8` + - Workflow: + +
+ + +#### Default variations + +`_cpu,_fp32,_onnxruntime,_python,_resnet50` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--clean=value` → `CM_MLPERF_CLEAN_SUBMISSION_DIR=value` +* `--count=value` → `CM_MLPERF_LOADGEN_QUERY_COUNT=value` +* `--dataset=value` → `CM_MLPERF_VISION_DATASET_OPTION=value` +* `--dataset_args=value` → `CM_MLPERF_EXTRA_DATASET_ARGS=value` +* `--docker=value` → `CM_RUN_DOCKER_CONTAINER=value` +* `--hw_name=value` → `CM_HW_NAME=value` +* `--imagenet_path=value` → `IMAGENET_PATH=value` +* `--max_amps=value` → `CM_MLPERF_POWER_MAX_AMPS=value` +* `--max_batchsize=value` → `CM_MLPERF_LOADGEN_MAX_BATCHSIZE=value` +* `--max_volts=value` → `CM_MLPERF_POWER_MAX_VOLTS=value` +* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` +* `--model=value` → `CM_MLPERF_CUSTOM_MODEL_PATH=value` +* `--multistream_target_latency=value` → `CM_MLPERF_LOADGEN_MULTISTREAM_TARGET_LATENCY=value` +* `--network=value` → `CM_NETWORK_LOADGEN=value` +* `--ntp_server=value` → `CM_MLPERF_POWER_NTP_SERVER=value` +* `--num_threads=value` → `CM_NUM_THREADS=value` +* `--offline_target_qps=value` → `CM_MLPERF_LOADGEN_OFFLINE_TARGET_QPS=value` +* `--output_dir=value` → `OUTPUT_BASE_DIR=value` +* `--power=value` → `CM_MLPERF_POWER=value` +* `--power_server=value` → `CM_MLPERF_POWER_SERVER_ADDRESS=value` +* `--regenerate_files=value` → `CM_REGENERATE_MEASURE_FILES=value` +* `--rerun=value` → `CM_RERUN=value` +* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` +* `--server_target_qps=value` → `CM_MLPERF_LOADGEN_SERVER_TARGET_QPS=value` +* `--singlestream_target_latency=value` → `CM_MLPERF_LOADGEN_SINGLESTREAM_TARGET_LATENCY=value` +* `--sut_servers=value` → `CM_NETWORK_LOADGEN_SUT_SERVERS=value` +* `--target_latency=value` → `CM_MLPERF_LOADGEN_TARGET_LATENCY=value` +* `--target_qps=value` → `CM_MLPERF_LOADGEN_TARGET_QPS=value` +* `--test_query_count=value` → `CM_TEST_QUERY_COUNT=value` +* `--threads=value` → `CM_NUM_THREADS=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "clean":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_LOADGEN_MODE: `accuracy` +* CM_MLPERF_LOADGEN_SCENARIO: `Offline` +* CM_OUTPUT_FOLDER_NAME: `test_results` +* CM_MLPERF_RUN_STYLE: `test` +* CM_TEST_QUERY_COUNT: `10` +* CM_MLPERF_QUANTIZATION: `False` +* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `reference` +* CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX: `` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-python/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,cuda,_cudnn + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_DEVICE': ['gpu'], 'CM_MLPERF_BACKEND': ['onnxruntime', 'tf', 'tflite', 'pytorch']}` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,nvidia,tensorrt + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['tensorrt']}` + - CM script: [get-tensorrt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tensorrt) + * get,generic-python-lib,_onnxruntime + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['onnxruntime', 'tvm-onnx'], 'CM_MLPERF_DEVICE': ['cpu', 'rocm']}` + * CM names: `--adr.['ml-engine-onnxruntime', 'onnxruntime']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnxruntime_gpu + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['onnxruntime', 'tvm-onnx'], 'CM_MLPERF_DEVICE': ['gpu']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['3d-unet-99', '3d-unet-99.9']}` + * CM names: `--adr.['ml-engine-onnxruntime-cuda']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnxruntime + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['onnxruntime'], 'CM_MLPERF_DEVICE': ['gpu'], 'CM_MODEL': ['3d-unet-99', '3d-unet-99.9', 'resnet50']}` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnxruntime_gpu + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['onnxruntime'], 'CM_MLPERF_DEVICE': ['gpu'], 'CM_MODEL': ['3d-unet-99', '3d-unet-99.9', 'resnet50']}` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['pytorch', 'tvm-pytorch'], 'CM_MLPERF_DEVICE': ['cpu', 'rocm']}` + * CM names: `--adr.['ml-engine-pytorch', 'pytorch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch_cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['pytorch', 'tvm-pytorch', 'ray'], 'CM_MLPERF_DEVICE': ['gpu']}` + * CM names: `--adr.['ml-engine-pytorch', 'pytorch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchvision + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['pytorch', 'tvm-pytorch'], 'CM_MLPERF_DEVICE': ['cpu']}` + * CM names: `--adr.['ml-engine-torchvision']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchvision_cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['pytorch', 'tvm-pytorch', 'ray'], 'CM_MLPERF_DEVICE': ['gpu']}` + * CM names: `--adr.['ml-engine-torchvision']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_tensorrt + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['ray']}` + * CM names: `--adr.['ml-engine-tensorrt']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch_tensorrt + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['ray']}` + * CM names: `--adr.['ml-engine-torch_tensorrt']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_ray + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['ray']}` + * CM names: `--adr.['ray']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_async_timeout + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['ray']}` + * CM names: `--adr.['async_timeout']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_transformers + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['bert-99', 'bert-99.9', 'gptj-99', 'gptj-99.9']}` + * CM names: `--adr.['ml-engine-transformers']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_tensorflow + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['tf', 'tflite']}` + * CM names: `--adr.['ml-engine-tensorflow', 'tensorflow']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.ncnn + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['ncnn']}` + * CM names: `--adr.['ml-engine-ncnn']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,ml-model,neural-magic,zoo + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_NEURALMAGIC_MODEL_ZOO_STUB': ['on']}` + * CM names: `--adr.['custom-ml-model']...` + - CM script: [get-ml-model-neuralmagic-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-neuralmagic-zoo) + * get,ml-model,image-classification,resnet50 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_CUSTOM_MODEL_PATH': ['on']}` + * CM names: `--adr.['ml-model', 'resnet50-model']...` + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + * get,ml-model,object-detection,retinanet + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + * CM names: `--adr.['ml-model', 'retinanet-model']...` + - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) + * get,ml-model,large-language-model,gptj + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['gptj-99', 'gptj-99.9']}` + * CM names: `--adr.['ml-model', 'gptj-model', 'gpt-j-model']...` + - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) + * get,ml-model,object-detection,resnext50,fp32,_pytorch-weights + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['pytorch'], 'CM_MLPERF_IMPLEMENTATION': ['nvidia'], 'CM_MODEL': ['retinanet']}` + * CM names: `--adr.['ml-model', 'retinanet-model']...` + - *Warning: no scripts found* + * get,ml-model,language-processing,bert-large + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['bert-99', 'bert-99.9']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_CUSTOM_MODEL_PATH': ['on']}` + * CM names: `--adr.['ml-model', 'bert-model']...` + - CM script: [get-ml-model-bert-large-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-large-squad) + * get,ml-model,stable-diffusion,text-to-image,sdxl + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['stable-diffusion-xl']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_CUSTOM_MODEL_PATH': ['on']}` + * CM names: `--adr.['ml-model', 'sdxl-model']...` + - CM script: [get-ml-model-stable-diffusion](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-stable-diffusion) + * get,ml-model,llama2 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['llama2-70b-99', 'llama2-70b-99.9']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_CUSTOM_MODEL_PATH': ['on']}` + * CM names: `--adr.['ml-model', 'llama2-model']...` + - CM script: [get-ml-model-llama2](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-llama2) + * get,ml-model,medical-imaging,3d-unet + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['3d-unet-99', '3d-unet-99.9']}` + * CM names: `--adr.['ml-model', '3d-unet-model']...` + - CM script: [get-ml-model-3d-unet-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-3d-unet-kits19) + * get,ml-model,speech-recognition,rnnt + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['rnnt']}` + * CM names: `--adr.['ml-model', 'rnnt-model']...` + - CM script: [get-ml-model-rnnt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-rnnt) + * get,ml-model,recommendation,dlrm + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['dlrm-99', 'dlrm-99.9']}` + * CM names: `--adr.['ml-model', 'dlrm-model']...` + - CM script: [get-ml-model-dlrm-terabyte](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-dlrm-terabyte) + * get,dataset,image-classification,imagenet,preprocessed + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_VISION_DATASET_OPTION': [True]}` + * CM names: `--adr.['imagenet-preprocessed']...` + - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) + * get,dataset,image-classification,imagenet,preprocessed,_pytorch + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50'], 'CM_MLPERF_VISION_DATASET_OPTION': ['imagenet_pytorch']}` + * CM names: `--adr.['imagenet-preprocessed']...` + - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) + * get,dataset-aux,image-classification,imagenet-aux + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) + * get,dataset,object-detection,open-images,openimages,preprocessed,_validation + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + * CM names: `--adr.['openimages-preprocessed']...` + - CM script: [get-preprocessed-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openimages) + * get,dataset,cnndm,_validation + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['gptj-99', 'gptj-99.9']}` + * CM names: `--adr.['cnndm-preprocessed']...` + - CM script: [get-dataset-cnndm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-cnndm) + * get,dataset,squad,original + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['bert-99', 'bert-99.9']}` + * CM names: `--adr.['cnndm-preprocessed']...` + - CM script: [get-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad) + * get,dataset-aux,squad-vocab + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['bert-99', 'bert-99.9']}` + - CM script: [get-dataset-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad-vocab) + * get,dataset,coco2014,_validation + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['stable-diffusion-xl']}` + * CM names: `--adr.['coco2014-preprocessed']...` + - CM script: [get-dataset-coco2014](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-coco2014) + * get,preprocessed,dataset,openorca,_validation + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['llama2-70b-99', 'llama2-70b-99.9']}` + * CM names: `--adr.['openorca-preprocessed']...` + - CM script: [get-preprocessed-dataset-openorca](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openorca) + * get,dataset,kits19,preprocessed + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['3d-unet-99', '3d-unet-99.9']}` + * CM names: `--adr.['kits19-preprocessed']...` + - CM script: [get-preprocessed-dataset-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-kits19) + * get,dataset,librispeech,preprocessed + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['rnnt']}` + * CM names: `--adr.['librispeech-preprocessed']...` + - CM script: [get-preprocessed-dataset-librispeech](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-librispeech) + * get,dataset,criteo,preprocessed + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['dlrm-99', 'dlrm-99.9']}` + * CM names: `--adr.['criteo-preprocessed']...` + - CM script: [get-preprocessed-dataset-criteo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-criteo) + * generate,user-conf,mlperf,inference + * CM names: `--adr.['user-conf-generator']...` + - CM script: [generate-mlperf-inference-user-conf](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-user-conf) + * get,loadgen + * CM names: `--adr.['loadgen', 'mlperf-inference-loadgen']...` + - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,mlcommons,inference,src + * CM names: `--adr.['mlperf-implementation']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,generic-python-lib,_package.psutil + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-python/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-python/_cm.yaml)*** + * remote,run,cmds + * Enable this dependency only if all ENV vars are set:
+`{'CM_ASSH_RUN_COMMANDS': ['on']}` + * CM names: `--adr.['remote-run-cmds']...` + - CM script: [remote-run-commands](https://github.com/mlcommons/cm4mlops/tree/master/script/remote-run-commands) + 1. ***Run native script if exists*** + 1. ***Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-python/_cm.yaml)*** + * benchmark-mlperf + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': ['on']}` + * CM names: `--adr.['mlperf-runner']...` + - CM script: [benchmark-program-mlperf](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program-mlperf) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-python/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-python/_cm.yaml)*** + * save,mlperf,inference,state + * CM names: `--adr.['save-mlperf-inference-state']...` + - CM script: [save-mlperf-inference-implementation-state](https://github.com/mlcommons/cm4mlops/tree/master/script/save-mlperf-inference-implementation-state) + +___ +### Script output +`cmr "app vision language mlcommons mlperf inference reference ref [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +* `CM_HW_NAME` +* `CM_MAX_EXAMPLES` +* `CM_MLPERF_*` +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_BACKEND` +* `CM_MLPERF_CONF` +* `CM_MLPERF_DEVICE` +* `CM_MLPERF_LOADGEN_EXTRA_OPTIONS` +* `CM_MLPERF_LOADGEN_MODE` +* `CM_MLPERF_LOADGEN_QPS_OPT` +* `CM_MLPERF_LOADGEN_SCENARIO` +* `CM_MLPERF_OUTPUT_DIR` +* `CM_MLPERF_RUN_CMD` +* `CM_ML_MODEL_FILE_WITH_PATH` \ No newline at end of file diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference.md new file mode 100644 index 000000000..819393fee --- /dev/null +++ b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference.md @@ -0,0 +1,805 @@ +Automatically generated README for this automation recipe: **app-mlperf-inference** + +Category: **Modular MLPerf inference benchmark pipeline** + +License: **Apache 2.0** + +Developers: [Arjun Suresh](https://www.linkedin.com/in/arjunsuresh), [Thomas 
Zhu](https://www.linkedin.com/in/hanwen-zhu-483614189), [Grigori Fursin](https://cKnowledge.org/gfursin) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-inference,d775cac873ee4231) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- + +This CM script provides a unified interface to prepare and run a modular version of the [MLPerf inference benchmark](https://arxiv.org/abs/1911.02549) +across diverse ML models, data sets, frameworks, libraries, run-time systems and platforms +using the [cross-platform automation meta-framework (MLCommons CM)](https://github.com/mlcommons/ck). + +It is assembled from reusable and interoperable [CM scripts for DevOps and MLOps](../list_of_scripts.md) +being developed by the [open MLCommons taskforce on automation and reproducibility](../mlperf-education-workgroup.md). + +It is a higher-level wrapper to several other CM scripts modularizing the MLPerf inference benchmark: +* [Reference Python implementation](../app-mlperf-inference-reference) +* [Universal C++ implementation](../app-mlperf-inference-cpp) +* [TFLite C++ implementation](../app-mlperf-inference-tflite-cpp) +* [NVidia optimized implementation](app-mlperf-inference-nvidia) + +See [this SCC'23 tutorial](https://github.com/mlcommons/ck/blob/master/docs/tutorials/sc22-scc-mlperf.md) +to use this script to run a reference (unoptimized) Python implementation of the MLPerf object detection benchmark +with RetinaNet model, Open Images dataset, ONNX runtime and CPU target. + +See this [CM script](../run-mlperf-inference-app) to automate and validate your MLPerf inference submission. 
+ +Get in touch with the [open taskforce on automation and reproducibility at MLCommons](https://github.com/mlcommons/ck/blob/master/docs/mlperf-education-workgroup.md) +if you need help with your submission or if you would like to participate in further modularization of MLPerf +and collaborative design space exploration and optimization of ML Systems. + + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *app,vision,language,mlcommons,mlperf,inference,generic* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "app vision language mlcommons mlperf inference generic" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=app,vision,language,mlcommons,mlperf,inference,generic` + +`cm run script --tags=app,vision,language,mlcommons,mlperf,inference,generic[,variations] [--input_flags]` + +*or* + +`cmr "app vision language mlcommons mlperf inference generic"` + +`cmr "app vision language mlcommons mlperf inference generic [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started 
Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + + +#### Input Flags + +* --**scenario**=MLPerf inference scenario {Offline,Server,SingleStream,MultiStream} (*Offline*) +* --**mode**=MLPerf inference mode {performance,accuracy} (*accuracy*) +* --**test_query_count**=Specifies the number of samples to be processed during a test run +* --**target_qps**=Target QPS +* --**target_latency**=Target Latency +* --**max_batchsize**=Maximum batchsize to be used +* --**num_threads**=Number of CPU threads to launch the application with +* --**hw_name**=Valid value - any system description which has a config file (under same name) defined [here](https://github.com/mlcommons/cm4mlops/tree/main/script/get-configs-sut-mlperf-inference/configs) +* --**output_dir**=Location where the outputs are produced +* --**rerun**=Redo the run even if previous run files exist (*True*) +* --**regenerate_files**=Regenerates measurement files including accuracy.txt files even if a previous run exists. This option is redundant if `--rerun` is used +* --**adr.python.name**=Python virtual environment name (optional) (*mlperf*) +* --**adr.python.version_min**=Minimal Python version (*3.8*) +* --**adr.python.version**=Force Python version (must have all system deps) +* --**adr.compiler.tags**=Compiler for loadgen (*gcc*) +* --**adr.inference-src-loadgen.env.CM_GIT_URL**=Git URL for MLPerf inference sources to build LoadGen (to enable non-reference implementations) +* --**adr.inference-src.env.CM_GIT_URL**=Git URL for MLPerf inference sources to run benchmarks (to enable non-reference implementations) +* --**quiet**=Quiet run (select default values for all questions) (*False*) +* --**readme**=Generate README with the reproducibility report +* --**debug**=Debug MLPerf script + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "scenario":...} +``` +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'app,vision,language,mlcommons,mlperf,inference,generic' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="app,vision,language,mlcommons,mlperf,inference,generic"``` + +#### Run this script via Docker (beta) + +`cm docker script "app vision language mlcommons mlperf inference generic[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**implementation**" +
+ Click here to expand this section. + + * `_cpp` + - Aliases: `_mil,_mlcommons-cpp` + - Environment variables: + - *CM_MLPERF_CPP*: `yes` + - *CM_MLPERF_IMPLEMENTATION*: `mlcommons_cpp` + - *CM_IMAGENET_ACCURACY_DTYPE*: `float32` + - *CM_OPENIMAGES_ACCURACY_DTYPE*: `float32` + - Workflow: + 1. ***Read "prehook_deps" on other CM scripts*** + * app,mlperf,cpp,inference + * Skip this dependency only if all ENV vars are set:
+`{'CM_SKIP_RUN': [True]}` + * CM names: `--adr.['cpp-mlperf-inference', 'mlperf-inference-implementation']...` + - CM script: [app-mlperf-inference-mlcommons-cpp](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-mlcommons-cpp) + * `_intel-original` + - Aliases: `_intel` + - Environment variables: + - *CM_MLPERF_IMPLEMENTATION*: `intel` + - Workflow: + 1. ***Read "prehook_deps" on other CM scripts*** + * reproduce,mlperf,inference,intel + * Skip this dependency only if all ENV vars are set:
+`{'CM_SKIP_RUN': [True]}` + * CM names: `--adr.['intel', 'intel-harness', 'mlperf-inference-implementation']...` + - CM script: [app-mlperf-inference-intel](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-intel) + * `_kilt` + - Aliases: `_qualcomm` + - Environment variables: + - *CM_MLPERF_IMPLEMENTATION*: `qualcomm` + - Workflow: + 1. ***Read "prehook_deps" on other CM scripts*** + * reproduce,mlperf,inference,kilt + * Skip this dependency only if all ENV vars are set:
+`{'CM_SKIP_RUN': [True]}` + * CM names: `--adr.['kilt', 'kilt-harness', 'mlperf-inference-implementation']...` + - CM script: [app-mlperf-inference-qualcomm](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-qualcomm) + * `_nvidia-original` + - Aliases: `_nvidia` + - Environment variables: + - *CM_MLPERF_IMPLEMENTATION*: `nvidia` + - *CM_SQUAD_ACCURACY_DTYPE*: `float16` + - *CM_IMAGENET_ACCURACY_DTYPE*: `int32` + - *CM_CNNDM_ACCURACY_DTYPE*: `int32` + - *CM_LIBRISPEECH_ACCURACY_DTYPE*: `int8` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cuda-devices + * Skip this dependency only if all ENV vars are set:
+`{'CM_CUDA_DEVICE_PROP_GLOBAL_MEMORY': ['yes', 'on']}` + - CM script: [get-cuda-devices](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda-devices) + 1. ***Read "prehook_deps" on other CM scripts*** + * reproduce,mlperf,nvidia,inference,_run_harness + * Skip this dependency only if all ENV vars are set:
+`{'CM_SKIP_RUN': [True]}` + * CM names: `--adr.['nvidia-original-mlperf-inference', 'nvidia-harness', 'mlperf-inference-implementation']...` + - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) + * **`_reference`** (default) + - Aliases: `_mlcommons-python,_python` + - Environment variables: + - *CM_MLPERF_PYTHON*: `yes` + - *CM_MLPERF_IMPLEMENTATION*: `mlcommons_python` + - *CM_SQUAD_ACCURACY_DTYPE*: `float32` + - *CM_IMAGENET_ACCURACY_DTYPE*: `float32` + - *CM_OPENIMAGES_ACCURACY_DTYPE*: `float32` + - *CM_LIBRISPEECH_ACCURACY_DTYPE*: `float32` + - Workflow: + 1. ***Read "prehook_deps" on other CM scripts*** + * app,mlperf,reference,inference + * Skip this dependency only if all ENV vars are set:
+`{'CM_SKIP_RUN': [True]}` + * CM names: `--adr.['python-reference-mlperf-inference', 'mlperf-inference-implementation']...` + - CM script: [app-mlperf-inference-mlcommons-python](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-mlcommons-python) + * `_tflite-cpp` + - Aliases: `_ctuning-cpp-tflite` + - Environment variables: + - *CM_MLPERF_TFLITE_CPP*: `yes` + - *CM_MLPERF_CPP*: `yes` + - *CM_MLPERF_IMPLEMENTATION*: `ctuning_cpp_tflite` + - *CM_IMAGENET_ACCURACY_DTYPE*: `float32` + - Workflow: + 1. ***Read "prehook_deps" on other CM scripts*** + * app,mlperf,tflite-cpp,inference + * Skip this dependency only if all ENV vars are set:
+`{'CM_SKIP_RUN': [True]}` + * CM names: `--adr.['tflite-cpp-mlperf-inference', 'mlperf-inference-implementation']...` + - CM script: [app-mlperf-inference-ctuning-cpp-tflite](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-ctuning-cpp-tflite) + +
+ + + * Group "**backend**" +
+ Click here to expand this section. + + * `_deepsparse` + - Environment variables: + - *CM_MLPERF_BACKEND*: `deepsparse` + - Workflow: + * `_glow` + - Environment variables: + - *CM_MLPERF_BACKEND*: `glow` + - Workflow: + * `_ncnn` + - Environment variables: + - *CM_MLPERF_BACKEND*: `ncnn` + - Workflow: + * `_onnxruntime` + - Environment variables: + - *CM_MLPERF_BACKEND*: `onnxruntime` + - Workflow: + * `_pytorch` + - Environment variables: + - *CM_MLPERF_BACKEND*: `pytorch` + - Workflow: + * `_ray` + - Environment variables: + - *CM_MLPERF_BACKEND*: `ray` + - Workflow: + * `_tensorrt` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tensorrt` + - Workflow: + * `_tf` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tf` + - Workflow: + * `_tflite` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tflite` + - Workflow: + * `_tvm-onnx` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tvm-onnx` + - Workflow: + * `_tvm-pytorch` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tvm-pytorch` + - Workflow: + * `_tvm-tflite` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tvm-tflite` + - Workflow: + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *CM_MLPERF_DEVICE*: `cpu` + - Workflow: + * `_cuda` + - Environment variables: + - *CM_MLPERF_DEVICE*: `gpu` + - Workflow: + * `_qaic` + - Environment variables: + - *CM_MLPERF_DEVICE*: `qaic` + - Workflow: + * `_rocm` + - Environment variables: + - *CM_MLPERF_DEVICE*: `rocm` + - Workflow: + * `_tpu` + - Environment variables: + - *CM_MLPERF_DEVICE*: `tpu` + - Workflow: + +
+ + + * Group "**model**" +
+ Click here to expand this section. + + * `_3d-unet-99` + - Environment variables: + - *CM_MODEL*: `3d-unet-99` + - Workflow: + * `_3d-unet-99.9` + - Environment variables: + - *CM_MODEL*: `3d-unet-99.9` + - Workflow: + * `_bert-99` + - Environment variables: + - *CM_MODEL*: `bert-99` + - Workflow: + * `_bert-99.9` + - Environment variables: + - *CM_MODEL*: `bert-99.9` + - Workflow: + * `_dlrm-v2-99` + - Environment variables: + - *CM_MODEL*: `dlrm-v2-99` + - Workflow: + * `_dlrm-v2-99.9` + - Environment variables: + - *CM_MODEL*: `dlrm-v2-99.9` + - Workflow: + * `_efficientnet` + - Environment variables: + - *CM_MODEL*: `efficientnet` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,dataset-aux,imagenet-aux + - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) + 1. ***Read "posthook_deps" on other CM scripts*** + * run,accuracy,mlperf,_imagenet + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` + * CM names: `--adr.['mlperf-accuracy-script', 'imagenet-accuracy-script']...` + - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) + * `_gptj-99` + - Environment variables: + - *CM_MODEL*: `gptj-99` + - Workflow: + * `_gptj-99.9` + - Environment variables: + - *CM_MODEL*: `gptj-99.9` + - Workflow: + * `_llama2-70b-99` + - Environment variables: + - *CM_MODEL*: `llama2-70b-99` + - Workflow: + * `_llama2-70b-99.9` + - Environment variables: + - *CM_MODEL*: `llama2-70b-99.9` + - Workflow: + * `_mobilenet` + - Environment variables: + - *CM_MODEL*: `mobilenet` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,dataset-aux,imagenet-aux + - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) + 1. ***Read "posthook_deps" on other CM scripts*** + * run,accuracy,mlperf,_imagenet + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` + * CM names: `--adr.['mlperf-accuracy-script', 'imagenet-accuracy-script']...` + - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) + * **`_resnet50`** (default) + - Environment variables: + - *CM_MODEL*: `resnet50` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,dataset-aux,imagenet-aux + - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) + 1. ***Read "posthook_deps" on other CM scripts*** + * run,accuracy,mlperf,_imagenet + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` + * CM names: `--adr.['mlperf-accuracy-script', 'imagenet-accuracy-script']...` + - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) + * `_retinanet` + - Environment variables: + - *CM_MODEL*: `retinanet` + - Workflow: + 1. ***Read "posthook_deps" on other CM scripts*** + * run,accuracy,mlperf,_openimages + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` + * CM names: `--adr.['mlperf-accuracy-script', 'openimages-accuracy-script']...` + - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) + * `_rnnt` + - Environment variables: + - *CM_MODEL*: `rnnt` + - Workflow: + 1. ***Read "posthook_deps" on other CM scripts*** + * run,accuracy,mlperf,_librispeech + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_IMPLEMENTATION': ['nvidia']}` + * CM names: `--adr.['mlperf-accuracy-script', 'librispeech-accuracy-script']...` + - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) + * `_sdxl` + - Environment variables: + - *CM_MODEL*: `stable-diffusion-xl` + - Workflow: + 1. ***Read "posthook_deps" on other CM scripts*** + * run,accuracy,mlperf,_coco2014 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_IMPLEMENTATION': ['nvidia']}` + * CM names: `--adr.['mlperf-accuracy-script', 'coco2014-accuracy-script']...` + - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) + +
+ + + * Group "**precision**" +
+ Click here to expand this section. + + * `_bfloat16` + - Environment variables: + - *CM_MLPERF_QUANTIZATION*: `False` + - *CM_MLPERF_MODEL_PRECISION*: `float32` + - Workflow: + * `_float16` + - Environment variables: + - *CM_MLPERF_QUANTIZATION*: `False` + - *CM_MLPERF_MODEL_PRECISION*: `float32` + - Workflow: + * **`_float32`** (default) + - Aliases: `_fp32` + - Environment variables: + - *CM_MLPERF_QUANTIZATION*: `False` + - *CM_MLPERF_MODEL_PRECISION*: `float32` + - Workflow: + * `_int4` + - Environment variables: + - *CM_MLPERF_QUANTIZATION*: `True` + - *CM_MLPERF_MODEL_PRECISION*: `int4` + - Workflow: + * `_int8` + - Aliases: `_quantized` + - Environment variables: + - *CM_MLPERF_QUANTIZATION*: `True` + - *CM_MLPERF_MODEL_PRECISION*: `int8` + - Workflow: + * `_uint8` + - Environment variables: + - *CM_MLPERF_QUANTIZATION*: `True` + - *CM_MLPERF_MODEL_PRECISION*: `uint8` + - Workflow: + +
+ + + * Group "**execution-mode**" +
+ Click here to expand this section. + + * `_fast` + - Environment variables: + - *CM_FAST_FACTOR*: `5` + - *CM_OUTPUT_FOLDER_NAME*: `fast_results` + - *CM_MLPERF_RUN_STYLE*: `fast` + - Workflow: + * **`_test`** (default) + - Environment variables: + - *CM_OUTPUT_FOLDER_NAME*: `test_results` + - *CM_MLPERF_RUN_STYLE*: `test` + - Workflow: + * `_valid` + - Environment variables: + - *CM_OUTPUT_FOLDER_NAME*: `valid_results` + - *CM_MLPERF_RUN_STYLE*: `valid` + - Workflow: + +
+ + + * Group "**reproducibility**" +
+ Click here to expand this section. + + * `_r2.1_default` + - Environment variables: + - *CM_SKIP_SYS_UTILS*: `yes` + - *CM_TEST_QUERY_COUNT*: `100` + - Workflow: + * `_r3.0_default` + - Environment variables: + - *CM_SKIP_SYS_UTILS*: `yes` + - Workflow: + * `_r3.1_default` + - Workflow: + * `_r4.0_default` + - Workflow: + +
+ + + * *Internal group (variations should not be selected manually)* +
+ Click here to expand this section. + + * `_3d-unet_` + - Environment variables: + - *CM_MLPERF_MODEL_EQUAL_ISSUE_MODE*: `yes` + - Workflow: + 1. ***Read "posthook_deps" on other CM scripts*** + * run,accuracy,mlperf,_kits19,_int8 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_IMPLEMENTATION': ['nvidia']}` + * CM names: `--adr.['mlperf-accuracy-script', '3d-unet-accuracy-script']...` + - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) + * `_bert_` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,dataset,squad,language-processing + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_DATASET_SQUAD_VAL_PATH': 'on'}` + - CM script: [get-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad) + * get,dataset-aux,squad-vocab + * Skip this dependency only if all ENV vars are set:
+`{'CM_ML_MODEL_BERT_VOCAB_FILE_WITH_PATH': 'on'}` + - CM script: [get-dataset-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad-vocab) + 1. ***Read "posthook_deps" on other CM scripts*** + * run,accuracy,mlperf,_squad + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` + * CM names: `--adr.['squad-accuracy-script', 'mlperf-accuracy-script']...` + - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) + * `_dlrm_` + - Workflow: + 1. ***Read "posthook_deps" on other CM scripts*** + * run,accuracy,mlperf,_terabyte,_float32 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` + * CM names: `--adr.['terabyte-accuracy-script', 'mlperf-accuracy-script']...` + - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) + * `_gptj_` + - Aliases: `_gptj` + - Environment variables: + - *CM_MLPERF_MODEL_EQUAL_ISSUE_MODE*: `yes` + - Workflow: + 1. ***Read "posthook_deps" on other CM scripts*** + * run,accuracy,mlperf,_cnndm + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_IMPLEMENTATION': ['intel']}` + * CM names: `--adr.['cnndm-accuracy-script', 'mlperf-accuracy-script']...` + - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) + * `_intel-original,gptj_` + - Workflow: + * `_llama2-70b_` + - Environment variables: + - *CM_MLPERF_MODEL_EQUAL_ISSUE_MODE*: `yes` + - Workflow: + 1. ***Read "posthook_deps" on other CM scripts*** + * run,accuracy,mlperf,_open-orca,_int32 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_IMPLEMENTATION': ['nvidia']}` + * CM names: `--adr.['mlperf-accuracy-script', 'open-orca-accuracy-script']...` + - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) + * `_reference,bert_` + - Workflow: + * `_reference,dlrm-v2_` + - Workflow: + * `_reference,gptj_` + - Workflow: + * `_reference,llama2-70b_` + - Workflow: + * `_reference,sdxl_` + - Workflow: + +
+ + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_intel-original,bert-99` + - Workflow: + * `_intel-original,bert-99.9` + - Workflow: + * `_intel-original,gptj-99` + - Workflow: + * `_intel-original,gptj-99.9` + - Workflow: + * `_intel-original,gptj_,build-harness` + - Workflow: + * `_intel-original,resnet50` + - Workflow: + * `_intel-original,retinanet` + - Workflow: + * `_kilt,qaic,bert-99` + - Workflow: + * `_kilt,qaic,bert-99.9` + - Workflow: + * `_kilt,qaic,resnet50` + - Workflow: + * `_kilt,qaic,retinanet` + - Workflow: + * `_power` + - Environment variables: + - *CM_MLPERF_POWER*: `yes` + - *CM_SYSTEM_POWER*: `yes` + - Workflow: + * `_reference,resnet50` + - Workflow: + * `_reference,retinanet` + - Workflow: + * `_rnnt,reference` + - Environment variables: + - *CM_MLPERF_PRINT_SUMMARY*: `no` + - Workflow: + * `_valid,retinanet` + - Workflow: + +
+ + + * Group "**batch_size**" +
+ Click here to expand this section. + + * `_batch_size.#` + - Environment variables: + - *CM_MLPERF_LOADGEN_MAX_BATCHSIZE*: `#` + - Workflow: + +
+ + + * Group "**loadgen-scenario**" +
+ Click here to expand this section. + + * `_multistream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `MultiStream` + - Workflow: + * **`_offline`** (default) + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Offline` + - Workflow: + * `_server` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Server` + - Workflow: + * `_singlestream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `SingleStream` + - Workflow: + +
+ + +#### Unsupported or invalid variation combinations + + + +* `_resnet50,_pytorch` +* `_retinanet,_tf` +* `_nvidia-original,_tf` +* `_nvidia-original,_onnxruntime` +* `_nvidia-original,_pytorch` +* `_nvidia,_tf` +* `_nvidia,_onnxruntime` +* `_nvidia,_pytorch` +* `_gptj,_tf` + +#### Default variations + +`_cpu,_float32,_offline,_reference,_resnet50,_test` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--clean=value` → `CM_MLPERF_CLEAN_SUBMISSION_DIR=value` +* `--count=value` → `CM_MLPERF_LOADGEN_QUERY_COUNT=value` +* `--debug=value` → `CM_DEBUG_SCRIPT_BENCHMARK_PROGRAM=value` +* `--docker=value` → `CM_RUN_DOCKER_CONTAINER=value` +* `--gpu_name=value` → `CM_NVIDIA_GPU_NAME=value` +* `--hw_name=value` → `CM_HW_NAME=value` +* `--imagenet_path=value` → `IMAGENET_PATH=value` +* `--max_amps=value` → `CM_MLPERF_POWER_MAX_AMPS=value` +* `--max_batchsize=value` → `CM_MLPERF_LOADGEN_MAX_BATCHSIZE=value` +* `--max_volts=value` → `CM_MLPERF_POWER_MAX_VOLTS=value` +* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` +* `--multistream_target_latency=value` → `CM_MLPERF_LOADGEN_MULTISTREAM_TARGET_LATENCY=value` +* `--ntp_server=value` → `CM_MLPERF_POWER_NTP_SERVER=value` +* `--num_threads=value` → `CM_NUM_THREADS=value` +* `--offline_target_qps=value` → `CM_MLPERF_LOADGEN_OFFLINE_TARGET_QPS=value` +* `--output_dir=value` → `OUTPUT_BASE_DIR=value` +* `--power=value` → `CM_MLPERF_POWER=value` +* `--power_server=value` → `CM_MLPERF_POWER_SERVER_ADDRESS=value` +* `--readme=value` → `CM_MLPERF_README=value` +* `--regenerate_files=value` → `CM_REGENERATE_MEASURE_FILES=value` +* `--rerun=value` → `CM_RERUN=value` +* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` +* `--server_target_qps=value` → `CM_MLPERF_LOADGEN_SERVER_TARGET_QPS=value` +* `--singlestream_target_latency=value` → `CM_MLPERF_LOADGEN_SINGLESTREAM_TARGET_LATENCY=value` +* `--target_latency=value` → `CM_MLPERF_LOADGEN_TARGET_LATENCY=value` +* `--target_qps=value` → `CM_MLPERF_LOADGEN_TARGET_QPS=value` +* `--test_query_count=value` → `CM_TEST_QUERY_COUNT=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "clean":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_LOADGEN_MODE: `accuracy` +* CM_MLPERF_LOADGEN_SCENARIO: `Offline` +* CM_OUTPUT_FOLDER_NAME: `test_results` +* CM_MLPERF_RUN_STYLE: `test` +* CM_TEST_QUERY_COUNT: `10` +* CM_MLPERF_QUANTIZATION: `False` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,mlperf,inference,utils + - CM script: [get-mlperf-inference-utils](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-utils) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference/_cm.yaml) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference/run.sh) + 1. ***Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference/_cm.yaml)*** + * get,mlperf,sut,description + - CM script: [get-mlperf-inference-sut-description](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-sut-description) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference/_cm.yaml) + +___ +### Script output +`cmr "app vision language mlcommons mlperf inference generic [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_MLPERF_*` +#### New environment keys auto-detected from customize + +* `CM_MLPERF_LOADGEN_COMPLIANCE_TEST` \ No newline at end of file diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/benchmark-program-mlperf.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/benchmark-program-mlperf.md new file mode 100644 index 000000000..37dfa75d4 --- /dev/null +++ b/docs/Modular-MLPerf-inference-benchmark-pipeline/benchmark-program-mlperf.md @@ -0,0 +1,152 @@ +Automatically generated README for this automation recipe: **benchmark-program-mlperf** + +Category: **Modular MLPerf inference benchmark pipeline** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=benchmark-program-mlperf,cfff0132a8aa4018) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program-mlperf)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *mlperf,benchmark-mlperf* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "mlperf benchmark-mlperf" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=mlperf,benchmark-mlperf` + +`cm run script --tags=mlperf,benchmark-mlperf[,variations] ` + +*or* + +`cmr "mlperf benchmark-mlperf"` + +`cmr "mlperf benchmark-mlperf [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'mlperf,benchmark-mlperf' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="mlperf,benchmark-mlperf"``` + +#### Run this script via Docker (beta) + +`cm docker script "mlperf benchmark-mlperf[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**power-mode**" +
+ Click here to expand this section. + + * **`_no-power`** (default) + - Workflow: + 1. ***Read "post_deps" on other CM scripts*** + * benchmark-program,program + * CM names: `--adr.['benchmark-program']...` + - CM script: [benchmark-program](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program) + * `_power` + - Environment variables: + - *CM_MLPERF_POWER*: `yes` + - Workflow: + 1. ***Read "prehook_deps" on other CM scripts*** + * benchmark-program,program + * CM names: `--adr.['benchmark-program']...` + - CM script: [benchmark-program](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program) + 1. ***Read "post_deps" on other CM scripts*** + * run,mlperf,power,client + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_LOADGEN_MODE': ['performance']}` + * CM names: `--adr.['mlperf-power-client']...` + - CM script: [run-mlperf-power-client](https://github.com/mlcommons/cm4mlops/tree/master/script/run-mlperf-power-client) + +
+ + +#### Default variations + +`_no-power` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program-mlperf/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program-mlperf/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program-mlperf/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program-mlperf/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program-mlperf/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program-mlperf/_cm.json) + +___ +### Script output +`cmr "mlperf benchmark-mlperf [,variations]" -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/run-mlperf-inference-app.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/run-mlperf-inference-app.md new file mode 100644 index 000000000..641d37d54 --- /dev/null +++ b/docs/Modular-MLPerf-inference-benchmark-pipeline/run-mlperf-inference-app.md @@ -0,0 +1,405 @@ +Automatically generated README for this automation recipe: **run-mlperf-inference-app** + +Category: **Modular MLPerf inference benchmark pipeline** + +License: **Apache 2.0** + +Developers: [Arjun Suresh](https://www.linkedin.com/in/arjunsuresh), [Grigori Fursin](https://cKnowledge.org/gfursin) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-mlperf-inference-app,4a5d5b13fd7e4ac8) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + 
+--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-app)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *run-mlperf,inference* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run-mlperf,inference" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run-mlperf,inference` + +`cm run script --tags=run-mlperf,inference[,variations] [--input_flags]` + +*or* + +`cmr "run-mlperf,inference"` + +`cmr "run-mlperf,inference [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + + +#### Input Flags + +* --**division**=MLPerf division {open,closed} (*open*) +* --**category**=MLPerf category {edge,datacenter,network} (*edge*) +* --**device**=MLPerf device {cpu,cuda,rocm,qaic} (*cpu*) +* --**model**=MLPerf model {resnet50,retinanet,bert-99,bert-99.9,3d-unet-99,3d-unet-99.9,rnnt,dlrm-v2-99,dlrm-v2-99.9,gptj-99,gptj-99.9,sdxl,llama2-70b-99,llama2-70b-99.9,mobilenet,efficientnet} (*resnet50*) +* --**precision**=MLPerf model precision {float32,float16,bfloat16,int8,uint8} 
+* --**implementation**=MLPerf implementation {mlcommons-python,mlcommons-cpp,nvidia,intel,qualcomm,ctuning-cpp-tflite} (*mlcommons-python*) +* --**backend**=MLPerf framework (backend) {onnxruntime,tf,pytorch,deepsparse,tensorrt,glow,tvm-onnx} (*onnxruntime*) +* --**scenario**=MLPerf scenario {Offline,Server,SingleStream,MultiStream} (*Offline*) +* --**mode**=MLPerf benchmark mode {,accuracy,performance} +* --**execution_mode**=MLPerf execution mode {test,fast,valid} (*test*) +* --**sut**=SUT configuration (if known) +* --**submitter**=Submitter name (without space) (*CTuning*) +* --**results_dir**=Folder path to store results (defaults to the current working directory) +* --**submission_dir**=Folder path to store MLPerf submission tree +* --**adr.compiler.tags**=Compiler for loadgen and any C/C++ part of implementation +* --**adr.inference-src-loadgen.env.CM_GIT_URL**=Git URL for MLPerf inference sources to build LoadGen (to enable non-reference implementations) +* --**adr.inference-src.env.CM_GIT_URL**=Git URL for MLPerf inference sources to run benchmarks (to enable non-reference implementations) +* --**adr.mlperf-inference-implementation.max_batchsize**=Maximum batchsize to be used +* --**adr.mlperf-inference-implementation.num_threads**=Number of threads (reference & C++ implementation only) +* --**adr.python.name**=Python virtual environment name (optional) +* --**adr.python.version**=Force Python version (must have all system deps) +* --**adr.python.version_min**=Minimal Python version (*3.8*) +* --**power**=Measure power {yes,no} (*no*) +* --**adr.mlperf-power-client.power_server**=MLPerf Power server IP address (*192.168.0.15*) +* --**adr.mlperf-power-client.port**=MLPerf Power server port (*4950*) +* --**clean**=Clean run (*False*) +* --**compliance**=Whether to run compliance tests (applicable only for closed division) {yes,no} (*no*) +* --**dashboard_wb_project**=W&B dashboard project (*cm-mlperf-dse-testing*) +* --**dashboard_wb_user**=W&B dashboard 
user (*cmind*) +* --**hw_name**=MLPerf hardware name (for example "gcp.c3_standard_8", "nvidia_orin", "lenovo_p14s_gen_4_windows_11", "macbook_pro_m1_2", "thundercomm_rb6" ...) +* --**multistream_target_latency**=Set MultiStream target latency +* --**offline_target_qps**=Set LoadGen Offline target QPS +* --**quiet**=Quiet run (select default values for all questions) (*True*) +* --**server_target_qps**=Set Server target QPS +* --**singlestream_target_latency**=Set SingleStream target latency +* --**target_latency**=Set Target latency +* --**target_qps**=Set LoadGen target QPS +* --**j**=Print results dictionary to console at the end of the run (*False*) +* --**repro**=Record input/output/state/info files to make it easier to reproduce results (*False*) +* --**time**=Print script execution time at the end of the run (*True*) +* --**debug**=Debug this script (*False*) + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "division":...} +``` +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'run-mlperf,inference' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run-mlperf,inference"``` + +#### Run this script via Docker (beta) + +`cm docker script "run-mlperf,inference[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_all-scenarios` + - Environment variables: + - *CM_MLPERF_LOADGEN_ALL_SCENARIOS*: `yes` + - Workflow: + * `_compliance` + - Environment variables: + - *CM_MLPERF_LOADGEN_COMPLIANCE*: `yes` + - Workflow: + * `_dashboard` + - Environment variables: + - *CM_MLPERF_DASHBOARD*: `on` + - Workflow: + +
+ + + * Group "**benchmark-version**" +
+ Click here to expand this section. + + * `_r2.1` + - Environment variables: + - *CM_MLPERF_INFERENCE_VERSION*: `2.1` + - *CM_RUN_MLPERF_INFERENCE_APP_DEFAULTS*: `r2.1_default` + - Workflow: + * `_r3.0` + - Environment variables: + - *CM_MLPERF_INFERENCE_VERSION*: `3.0` + - *CM_RUN_MLPERF_INFERENCE_APP_DEFAULTS*: `r3.0_default` + - Workflow: + * `_r3.1` + - Environment variables: + - *CM_MLPERF_INFERENCE_VERSION*: `3.1` + - *CM_RUN_MLPERF_INFERENCE_APP_DEFAULTS*: `r3.1_default` + - Workflow: + * `_r4.0` + - Environment variables: + - *CM_MLPERF_INFERENCE_VERSION*: `4.0` + - *CM_RUN_MLPERF_INFERENCE_APP_DEFAULTS*: `r4.0_default` + - Workflow: + +
+ + + * Group "**mode**" +
+ Click here to expand this section. + + * `_all-modes` + - Environment variables: + - *CM_MLPERF_LOADGEN_ALL_MODES*: `yes` + - Workflow: + +
+ + + * Group "**submission-generation**" +
+ Click here to expand this section. + + * `_accuracy-only` + - Environment variables: + - *CM_MLPERF_LOADGEN_MODE*: `accuracy` + - *CM_MLPERF_SUBMISSION_RUN*: `yes` + - *CM_RUN_MLPERF_ACCURACY*: `on` + - *CM_RUN_SUBMISSION_CHECKER*: `no` + - Workflow: + * **`_find-performance`** (default) + - Environment variables: + - *CM_MLPERF_FIND_PERFORMANCE_MODE*: `yes` + - *CM_MLPERF_LOADGEN_ALL_MODES*: `no` + - *CM_MLPERF_LOADGEN_MODE*: `performance` + - *CM_MLPERF_RESULT_PUSH_TO_GITHUB*: `False` + - Workflow: + * `_performance-only` + - Environment variables: + - *CM_MLPERF_LOADGEN_MODE*: `performance` + - *CM_MLPERF_SUBMISSION_RUN*: `yes` + - *CM_RUN_SUBMISSION_CHECKER*: `no` + - Workflow: + * `_populate-readme` + - Environment variables: + - *CM_MLPERF_README*: `yes` + - *CM_MLPERF_SUBMISSION_RUN*: `yes` + - *CM_RUN_SUBMISSION_CHECKER*: `no` + - Workflow: + * `_submission` + - Environment variables: + - *CM_MLPERF_LOADGEN_COMPLIANCE*: `yes` + - *CM_MLPERF_SUBMISSION_RUN*: `yes` + - *CM_RUN_MLPERF_ACCURACY*: `on` + - *CM_RUN_SUBMISSION_CHECKER*: `yes` + - *CM_TAR_SUBMISSION_DIR*: `yes` + - Workflow: + 1. ***Read "post_deps" on other CM scripts*** + * generate,mlperf,inference,submission + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_SUBMISSION_GENERATION': ['no', 'false', 'False', '0']}` + * CM names: `--adr.['submission-generator']...` + - CM script: [generate-mlperf-inference-submission](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-submission) + +
+ + + * Group "**submission-generation-style**" +
+ Click here to expand this section. + + * `_full` + - Environment variables: + - *CM_MLPERF_SUBMISSION_GENERATION_STYLE*: `full` + - *CM_MLPERF_SKIP_SUBMISSION_GENERATION*: `yes` + - Workflow: + * **`_short`** (default) + - Environment variables: + - *CM_MLPERF_SUBMISSION_GENERATION_STYLE*: `short` + - Workflow: + +
+ + +#### Default variations + +`_find-performance,_short` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--backend=value` → `CM_MLPERF_BACKEND=value` +* `--batch_size=value` → `CM_MLPERF_LOADGEN_MAX_BATCHSIZE=value` +* `--category=value` → `CM_MLPERF_SUBMISSION_SYSTEM_TYPE=value` +* `--clean=value` → `CM_MLPERF_CLEAN_ALL=value` +* `--compliance=value` → `CM_MLPERF_LOADGEN_COMPLIANCE=value` +* `--dashboard_wb_project=value` → `CM_MLPERF_DASHBOARD_WANDB_PROJECT=value` +* `--dashboard_wb_user=value` → `CM_MLPERF_DASHBOARD_WANDB_USER=value` +* `--debug=value` → `CM_DEBUG_SCRIPT_BENCHMARK_PROGRAM=value` +* `--device=value` → `CM_MLPERF_DEVICE=value` +* `--division=value` → `CM_MLPERF_SUBMISSION_DIVISION=value` +* `--docker=value` → `CM_MLPERF_USE_DOCKER=value` +* `--dump_version_info=value` → `CM_DUMP_VERSION_INFO=value` +* `--execution_mode=value` → `CM_MLPERF_RUN_STYLE=value` +* `--find_performance=value` → `CM_MLPERF_FIND_PERFORMANCE_MODE=value` +* `--gpu_name=value` → `CM_NVIDIA_GPU_NAME=value` +* `--hw_name=value` → `CM_HW_NAME=value` +* `--hw_notes_extra=value` → `CM_MLPERF_SUT_SW_NOTES_EXTRA=value` +* `--imagenet_path=value` → `IMAGENET_PATH=value` +* `--implementation=value` → `CM_MLPERF_IMPLEMENTATION=value` +* `--lang=value` → `CM_MLPERF_IMPLEMENTATION=value` +* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` +* `--model=value` → `CM_MLPERF_MODEL=value` +* `--multistream_target_latency=value` → `CM_MLPERF_LOADGEN_MULTISTREAM_TARGET_LATENCY=value` +* `--network=value` → `CM_NETWORK_LOADGEN=value` +* `--offline_target_qps=value` → `CM_MLPERF_LOADGEN_OFFLINE_TARGET_QPS=value` +* `--output_dir=value` → `OUTPUT_BASE_DIR=value` +* `--output_summary=value` → `MLPERF_INFERENCE_SUBMISSION_SUMMARY=value` +* `--output_tar=value` → `MLPERF_INFERENCE_SUBMISSION_TAR_FILE=value` +* `--performance_sample_count=value` → `CM_MLPERF_LOADGEN_PERFORMANCE_SAMPLE_COUNT=value` +* `--power=value` → `CM_SYSTEM_POWER=value` +* `--precision=value` → `CM_MLPERF_MODEL_PRECISION=value` +* `--preprocess_submission=value` → 
`CM_RUN_MLPERF_SUBMISSION_PREPROCESSOR=value` +* `--push_to_github=value` → `CM_MLPERF_RESULT_PUSH_TO_GITHUB=value` +* `--readme=value` → `CM_MLPERF_README=value` +* `--regenerate_accuracy_file=value` → `CM_MLPERF_REGENERATE_ACCURACY_FILE=value` +* `--regenerate_files=value` → `CM_REGENERATE_MEASURE_FILES=value` +* `--rerun=value` → `CM_RERUN=value` +* `--results_dir=value` → `OUTPUT_BASE_DIR=value` +* `--results_git_url=value` → `CM_MLPERF_RESULTS_GIT_REPO_URL=value` +* `--run_checker=value` → `CM_RUN_SUBMISSION_CHECKER=value` +* `--run_style=value` → `CM_MLPERF_RUN_STYLE=value` +* `--save_console_log=value` → `CM_SAVE_CONSOLE_LOG=value` +* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` +* `--server_target_qps=value` → `CM_MLPERF_LOADGEN_SERVER_TARGET_QPS=value` +* `--singlestream_target_latency=value` → `CM_MLPERF_LOADGEN_SINGLESTREAM_TARGET_LATENCY=value` +* `--skip_submission_generation=value` → `CM_MLPERF_SKIP_SUBMISSION_GENERATION=value` +* `--skip_truncation=value` → `CM_SKIP_TRUNCATE_ACCURACY=value` +* `--submission_dir=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` +* `--submitter=value` → `CM_MLPERF_SUBMITTER=value` +* `--sut=value` → `CM_MLPERF_INFERENCE_SUT_VARIATION=value` +* `--sut_servers=value` → `CM_NETWORK_LOADGEN_SUT_SERVERS=value` +* `--sw_notes_extra=value` → `CM_MLPERF_SUT_SW_NOTES_EXTRA=value` +* `--system_type=value` → `CM_MLPERF_SUBMISSION_SYSTEM_TYPE=value` +* `--target_latency=value` → `CM_MLPERF_LOADGEN_TARGET_LATENCY=value` +* `--target_qps=value` → `CM_MLPERF_LOADGEN_TARGET_QPS=value` +* `--test_query_count=value` → `CM_TEST_QUERY_COUNT=value` +* `--threads=value` → `CM_NUM_THREADS=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "backend":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_IMPLEMENTATION: `reference` +* CM_MLPERF_MODEL: `resnet50` +* CM_MLPERF_RUN_STYLE: `test` + +
+ +#### Versions +* `master` +* `r2.1` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-app/_cm.yaml)*** + * detect,os + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_USE_DOCKER': [True]}` + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_USE_DOCKER': [True]}` + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,python3 + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_USE_DOCKER': [True]}` + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,sut,description + - CM script: [get-mlperf-inference-sut-description](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-sut-description) + * get,mlperf,inference,results,dir + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_USE_DOCKER': [False]}` + * Skip this dependency only if all ENV vars are set:
+`{'OUTPUT_BASE_DIR': [True]}` + * CM names: `--adr.['get-mlperf-inference-results-dir']...` + - CM script: [get-mlperf-inference-results-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results-dir) + * install,pip-package,for-cmind-python,_package.tabulate + - CM script: [install-pip-package-for-cmind-python](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pip-package-for-cmind-python) + * get,mlperf,inference,utils + - CM script: [get-mlperf-inference-utils](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-utils) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-app/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-app/_cm.yaml) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-app/_cm.yaml) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-app/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-app/_cm.yaml) + +___ +### Script output +`cmr "run-mlperf,inference [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-nvidia.md b/docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-nvidia.md new file mode 100644 index 000000000..baf219a06 --- /dev/null +++ b/docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-nvidia.md @@ -0,0 +1,242 @@ +Automatically generated README for this automation recipe: **app-mlperf-training-nvidia** + +Category: **Modular MLPerf training benchmark pipeline** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-training-nvidia,1e2e357618cc4674) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *app,vision,language,mlcommons,mlperf,training,nvidia* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "app vision language mlcommons mlperf training nvidia" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=app,vision,language,mlcommons,mlperf,training,nvidia` + +`cm run script --tags=app,vision,language,mlcommons,mlperf,training,nvidia[,variations] [--input_flags]` + +*or* + +`cmr "app vision language mlcommons mlperf training nvidia"` + +`cmr "app vision language mlcommons mlperf training nvidia [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'app,vision,language,mlcommons,mlperf,training,nvidia', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="app,vision,language,mlcommons,mlperf,training,nvidia"``` + +#### Run this script via Docker (beta) + +`cm docker script "app vision language mlcommons mlperf training nvidia[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_bert` + - Environment variables: + - *CM_MLPERF_MODEL*: `bert` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_protobuf + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['tf', 'tflite']}` + * CM names: `--adr.['protobuf']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch + * CM names: `--adr.['ml-engine-pytorch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cuda`** (default) + - Environment variables: + - *CM_MLPERF_DEVICE*: `cuda` + - *USE_CUDA*: `True` + - Workflow: + * `_tpu` + - Environment variables: + - *CM_MLPERF_DEVICE*: `tpu` + - *CUDA_VISIBLE_DEVICES*: `` + - *USE_CUDA*: `False` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * `_pytorch` + - Environment variables: + - *CM_MLPERF_BACKEND*: `pytorch` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - Workflow: + * `_tf` + - Aliases: `_tensorflow` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tf` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - Workflow: + +
+ + +#### Default variations + +`_cuda` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--clean=value` → `CM_MLPERF_CLEAN_SUBMISSION_DIR=value` +* `--docker=value` → `CM_RUN_DOCKER_CONTAINER=value` +* `--hw_name=value` → `CM_HW_NAME=value` +* `--model=value` → `CM_MLPERF_CUSTOM_MODEL_PATH=value` +* `--num_threads=value` → `CM_NUM_THREADS=value` +* `--output_dir=value` → `OUTPUT_BASE_DIR=value` +* `--rerun=value` → `CM_RERUN=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "clean":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `nvidia` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlperf,training,src + * CM names: `--adr.['training-src', 'mlperf-training-src']...` + - CM script: [get-mlperf-training-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-src) + * get,git,repo,_repo.https://github.com/mlcommons/training_results_v2.1 + * CM names: `--adr.['training-results', 'mlperf-training-results']...` + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + * get,cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_DEVICE': ['cuda']}` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,generic-python-lib,_torchvision_cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['pytorch'], 'CM_MLPERF_DEVICE': ['cuda']}` + * CM names: `--adr.['ml-engine-torchvision']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_mlperf_logging + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * prepare,mlperf,training,data,bert,_nvidia + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_MODEL': ['bert']}` + * CM names: `--adr.['prepare-data', 'bert-model']...` + - CM script: [prepare-training-data-bert](https://github.com/mlcommons/cm4mlops/tree/master/script/prepare-training-data-bert) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia/_cm.yaml) + 1. ***Run native script if exists*** + * [run-bert-training.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia/run-bert-training.sh) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia/_cm.yaml) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia/_cm.yaml) + +___ +### Script output +`cmr "app vision language mlcommons mlperf training nvidia [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +* `CM_HW_NAME` +* `CM_MLPERF_*` +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize diff --git a/docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-reference.md b/docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-reference.md new file mode 100644 index 000000000..91149ed22 --- /dev/null +++ b/docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-reference.md @@ -0,0 +1,240 @@ +Automatically generated README for this automation recipe: **app-mlperf-training-reference** + +Category: **Modular MLPerf training benchmark pipeline** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-training-reference,0c4b11bdcf494b4f) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *app,vision,language,mlcommons,mlperf,training,reference,ref* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "app vision language mlcommons mlperf training reference ref" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=app,vision,language,mlcommons,mlperf,training,reference,ref` + +`cm run script --tags=app,vision,language,mlcommons,mlperf,training,reference,ref[,variations] [--input_flags]` + +*or* + +`cmr "app vision language mlcommons mlperf training reference ref"` + +`cmr "app vision language mlcommons mlperf training reference ref [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'app,vision,language,mlcommons,mlperf,training,reference,ref', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="app,vision,language,mlcommons,mlperf,training,reference,ref"``` + +#### Run this script via Docker (beta) + +`cm docker script "app vision language mlcommons mlperf training reference ref[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_bert` + - Environment variables: + - *CM_MLPERF_MODEL*: `bert` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_protobuf + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['tf', 'tflite']}` + * CM names: `--adr.['protobuf']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch + * CM names: `--adr.['ml-engine-pytorch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cuda`** (default) + - Environment variables: + - *CM_MLPERF_DEVICE*: `cuda` + - *USE_CUDA*: `True` + - Workflow: + * `_tpu` + - Environment variables: + - *CM_MLPERF_DEVICE*: `tpu` + - *CUDA_VISIBLE_DEVICES*: `` + - *USE_CUDA*: `False` + - Workflow: + +
+ + + * Group "**framework**" +
+ Click here to expand this section. + + * `_pytorch` + - Environment variables: + - *CM_MLPERF_BACKEND*: `pytorch` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - Workflow: + * `_tf` + - Aliases: `_tensorflow` + - Environment variables: + - *CM_MLPERF_BACKEND*: `tf` + - *CM_MLPERF_BACKEND_VERSION*: `<<>>` + - Workflow: + +
+ + +#### Default variations + +`_cuda` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--clean=value` → `CM_MLPERF_CLEAN_SUBMISSION_DIR=value` +* `--docker=value` → `CM_RUN_DOCKER_CONTAINER=value` +* `--hw_name=value` → `CM_HW_NAME=value` +* `--model=value` → `CM_MLPERF_CUSTOM_MODEL_PATH=value` +* `--num_threads=value` → `CM_NUM_THREADS=value` +* `--output_dir=value` → `OUTPUT_BASE_DIR=value` +* `--rerun=value` → `CM_RERUN=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "clean":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `reference` +* CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX: `` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,mlperf,training,src + * CM names: `--adr.['training-src']...` + - CM script: [get-mlperf-training-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-src) + * get,cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_DEVICE': ['cuda']}` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,generic-python-lib,_torchvision_cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_BACKEND': ['pytorch'], 'CM_MLPERF_DEVICE': ['cuda']}` + * CM names: `--adr.['ml-engine-torchvision']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_mlperf_logging + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * prepare,mlperf,training,data,bert,_reference + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_MODEL': ['bert']}` + * CM names: `--adr.['prepare-data', 'bert-model']...` + - CM script: [prepare-training-data-bert](https://github.com/mlcommons/cm4mlops/tree/master/script/prepare-training-data-bert) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference/_cm.yaml) + 1. ***Run native script if exists*** + * [run-bert-training.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference/run-bert-training.sh) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference/_cm.yaml) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference/_cm.yaml) + +___ +### Script output +`cmr "app vision language mlcommons mlperf training reference ref [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_DATASET_*` +* `CM_HW_NAME` +* `CM_MLPERF_*` +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize diff --git a/docs/Modular-application-pipeline/app-image-corner-detection.md b/docs/Modular-application-pipeline/app-image-corner-detection.md new file mode 100644 index 000000000..933030b4f --- /dev/null +++ b/docs/Modular-application-pipeline/app-image-corner-detection.md @@ -0,0 +1,129 @@ +Automatically generated README for this automation recipe: **app-image-corner-detection** + +Category: **Modular application pipeline** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-image-corner-detection,998ffee0bc534d0a) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-corner-detection)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *app,image,corner-detection* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "app image corner-detection" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=app,image,corner-detection` + +`cm run script --tags=app,image,corner-detection ` + +*or* + +`cmr "app image corner-detection"` + +`cmr "app image corner-detection " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'app,image,corner-detection', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="app,image,corner-detection"``` + +#### Run this script via Docker (beta) + +`cm docker script "app image corner-detection" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-corner-detection/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-corner-detection/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-corner-detection/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-corner-detection/run.sh) + 1. ***Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-corner-detection/_cm.json)*** + * compile,cpp-program + * Skip this dependency only if all ENV vars are set:
+`{'CM_SKIP_COMPILE': ['on']}` + - CM script: [compile-program](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-program) + * benchmark-program + * Skip this dependency only if all ENV vars are set:
+`{'CM_SKIP_RUN': ['on']}` + - CM script: [benchmark-program](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-corner-detection/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-corner-detection/_cm.json) + +___ +### Script output +`cmr "app image corner-detection " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Platform-information/detect-cpu.md b/docs/Platform-information/detect-cpu.md new file mode 100644 index 000000000..353ee6d4b --- /dev/null +++ b/docs/Platform-information/detect-cpu.md @@ -0,0 +1,128 @@ +Automatically generated README for this automation recipe: **detect-cpu** + +Category: **Platform information** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=detect-cpu,586c8a43320142f7) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *detect,cpu,detect-cpu,info* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "detect cpu detect-cpu info" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=detect,cpu,detect-cpu,info` + +`cm run script --tags=detect,cpu,detect-cpu,info ` + +*or* + +`cmr "detect cpu detect-cpu info"` + +`cmr "detect cpu detect-cpu info " ` + + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'detect,cpu,detect-cpu,info',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="detect,cpu,detect-cpu,info"``` + +#### Run this script via Docker (beta) + +`cm docker script "detect cpu detect-cpu info" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+
+___
+### Dependencies on other CM scripts
+
+
+  1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu/_cm.json)***
+     * detect,os
+       - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os)
+  1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu/customize.py)***
+  1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu/_cm.json)
+  1. ***Run native script if exists***
+     * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu/run.bat)
+     * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu/run.sh)
+  1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu/_cm.json)
+  1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu/customize.py)***
+  1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu/_cm.json) + +___ +### Script output +`cmr "detect cpu detect-cpu info " -j` +#### New environment keys (filter) + +* `CM_HOST_CPU_*` +* `CM_HOST_DISK_CAPACITY` +* `CM_HOST_MEMORY_CAPACITY` +#### New environment keys auto-detected from customize + +* `CM_HOST_CPU_PHYSICAL_CORES_PER_SOCKET` +* `CM_HOST_CPU_SOCKETS` +* `CM_HOST_CPU_THREADS_PER_CORE` +* `CM_HOST_CPU_TOTAL_LOGICAL_CORES` \ No newline at end of file diff --git a/docs/Platform-information/detect-os.md b/docs/Platform-information/detect-os.md new file mode 100644 index 000000000..07061659a --- /dev/null +++ b/docs/Platform-information/detect-os.md @@ -0,0 +1,138 @@ +Automatically generated README for this automation recipe: **detect-os** + +Category: **Platform information** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=detect-os,863735b7db8c44fc) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *detect-os,detect,os,info* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "detect-os detect os info" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=detect-os,detect,os,info` + +`cm run script --tags=detect-os,detect,os,info ` + +*or* + +`cmr "detect-os detect os info"` + +`cmr "detect-os detect os info " ` + + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'detect-os,detect,os,info',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="detect-os,detect,os,info"``` + +#### Run this script via Docker (beta) + +`cm docker script "detect-os detect os info" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+
+___
+### Dependencies on other CM scripts
+
+
+  1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os/_cm.json)
+  1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os/customize.py)***
+  1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os/_cm.json)
+  1. ***Run native script if exists***
+     * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os/run.bat)
+     * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os/run.sh)
+  1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os/_cm.json)
+  1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os/customize.py)***
+  1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os/_cm.json)***
+     * get,sys-utils-min
+       * Enable this dependency only if all ENV vars are set:<br>
+`{'CM_HOST_OS_TYPE': ['windows']}` + - CM script: [get-sys-utils-min](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-min) + +___ +### Script output +`cmr "detect-os detect os info " -j` +#### New environment keys (filter) + +* `+CM_HOST_OS_*` +* `+PATH` +* `CM_HOST_OS_*` +* `CM_HOST_PLATFORM_*` +* `CM_HOST_PYTHON_*` +* `CM_HOST_SYSTEM_NAME` +* `CM_RUN_STATE_DOCKER` +#### New environment keys auto-detected from customize + +* `CM_HOST_OS_BITS` +* `CM_HOST_OS_MACHINE` +* `CM_HOST_OS_PACKAGE_MANAGER` +* `CM_HOST_OS_PACKAGE_MANAGER_INSTALL_CMD` +* `CM_HOST_OS_PACKAGE_MANAGER_UPDATE_CMD` +* `CM_HOST_OS_TYPE` +* `CM_HOST_PYTHON_BITS` +* `CM_HOST_SYSTEM_NAME` \ No newline at end of file diff --git a/docs/Python-automation/activate-python-venv.md b/docs/Python-automation/activate-python-venv.md new file mode 100644 index 000000000..f2d9f47bf --- /dev/null +++ b/docs/Python-automation/activate-python-venv.md @@ -0,0 +1,121 @@ +Automatically generated README for this automation recipe: **activate-python-venv** + +Category: **Python automation** + +License: **Apache 2.0** + +Developers: [Grigori Fursin](https://cKnowledge.org/gfursin) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=activate-python-venv,fcbbb84946f34c55) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/activate-python-venv)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *activate,python-venv* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "activate python-venv" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=activate,python-venv` + +`cm run script --tags=activate,python-venv ` + +*or* + +`cmr "activate python-venv"` + +`cmr "activate python-venv " ` + + +#### Run this script from Python + +
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'activate,python-venv',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="activate,python-venv"``` + +#### Run this script via Docker (beta) + +`cm docker script "activate python-venv" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+
+___
+### Dependencies on other CM scripts
+
+
+  1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/activate-python-venv/_cm.json)
+  1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/activate-python-venv/customize.py)***
+  1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/activate-python-venv/_cm.json)***
+     * install,python-venv
+       * CM names: `--adr.['python-venv']...`
+       - CM script: [install-python-venv](https://github.com/mlcommons/cm4mlops/tree/master/script/install-python-venv)
+  1. ***Run native script if exists***
+     * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/activate-python-venv/run.bat)
+     * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/activate-python-venv/run.sh)
+  1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/activate-python-venv/_cm.json)
+  1. Run "postprocess" function from customize.py
+  1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/activate-python-venv/_cm.json) + +___ +### Script output +`cmr "activate python-venv " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Python-automation/get-generic-python-lib.md b/docs/Python-automation/get-generic-python-lib.md new file mode 100644 index 000000000..ce3f9525c --- /dev/null +++ b/docs/Python-automation/get-generic-python-lib.md @@ -0,0 +1,681 @@ +Automatically generated README for this automation recipe: **get-generic-python-lib** + +Category: **Python automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-generic-python-lib,94b62a682bc44791) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,generic-python-lib* +* Output cached? 
*True*
+* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts
+
+
+---
+### Reuse this script in your project
+
+#### Install MLCommons CM automation meta-framework
+
+* [Install CM](https://access.cknowledge.org/playground/?action=install)
+* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md)
+
+#### Pull CM repository with this automation recipe (CM script)
+
+```cm pull repo mlcommons@cm4mlops```
+
+#### Print CM help from the command line
+
+````cmr "get generic-python-lib" --help````
+
+#### Customize and run this script from the command line with different variations and flags
+
+`cm run script --tags=get,generic-python-lib`
+
+`cm run script --tags=get,generic-python-lib[,variations] [--input_flags]`
+
+*or*
+
+`cmr "get generic-python-lib"`
+
+`cmr "get generic-python-lib [variations]" [--input_flags]`
+
+
+* *See the list of `variations` [here](#variations) and check the [Getting Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.*
+
+#### Run this script from Python
+
+<details>
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,generic-python-lib',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,generic-python-lib"``` + +#### Run this script via Docker (beta) + +`cm docker script "get generic-python-lib[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_Pillow` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `Pillow` + - Workflow: + * `_apache-tvm` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `apache-tvm` + - *CM_GENERIC_PYTHON_PIP_EXTRA*: ` --pre` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_typing_extensions + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_apex` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `apex` + - Workflow: + * `_async_timeout` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `async_timeout` + - Workflow: + * `_attr` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `attr` + - Workflow: + * `_attrs` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `attrs` + - Workflow: + * `_boto3` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `boto3` + - Workflow: + * `_cloudpickle` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `cloudpickle` + - Workflow: + * `_cmind` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `cmind` + - Workflow: + * `_colored` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `colored` + - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL*: `https://pypi.ngc.nvidia.com` + - Workflow: + * `_conda.#` + - Workflow: + * `_cupy` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `cupy` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,cuda + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_custom-python` + - Environment variables: + - *CM_TMP_USE_CUSTOM_PYTHON*: `on` + - Workflow: + * `_datasets` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `datasets` + - Workflow: + * `_decorator` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `decorator` + - Workflow: + * `_deepsparse` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `deepsparse` + - Workflow: + * `_dllogger` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `dllogger` + - *CM_GENERIC_PYTHON_PIP_URL*: `git+https://github.com/NVIDIA/dllogger#egg=dllogger` + - Workflow: + * `_fiftyone` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `fiftyone` + - Workflow: + * `_google-api-python-client` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `google_api_python_client` + - Workflow: + * `_google-auth-oauthlib` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `google_auth_oauthlib` + - Workflow: + * `_huggingface_hub` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `huggingface_hub` + - Workflow: + * `_inflect` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `inflect` + - Workflow: + * `_jax` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `jax` + - Workflow: + * `_jax_cuda` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `jax[cuda]` + - *CM_GENERIC_PYTHON_PIP_EXTRA*: `-f https://storage.googleapis.com/jax-releases/jax_cuda_releases.html` + - *CM_JAX_VERSION_EXTRA*: `CUDA` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,cuda + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_librosa` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `librosa` + - Workflow: + * `_matplotlib` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `matplotlib` + - Workflow: + * `_mlperf_loadgen` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `mlperf_loadgen` + - *CM_GENERIC_PYTHON_PIP_URL*: `git+https://github.com/mlcommons/inference.git#subdirectory=loadgen` + - Workflow: + * `_mlperf_logging` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `mlperf_logging` + - *CM_GENERIC_PYTHON_PIP_URL*: `git+https://github.com/mlperf/logging.git` + - Workflow: + * `_mpld3` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `mpld3` + - Workflow: + * `_nibabel` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `nibabel` + - Workflow: + * `_numpy` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `numpy` + - Workflow: + * `_nvidia-apex` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `apex` + - *CM_GENERIC_PYTHON_PACKAGE_VARIANT*: `nvidia-apex` + - *CM_GENERIC_PYTHON_PIP_URL*: `git+https://github.com/nvidia/apex@0da3ffb92ee6fbe5336602f0e3989db1cd16f880` + - Workflow: + * `_nvidia-apex-from-src` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `apex` + - *CM_GENERIC_PYTHON_PACKAGE_VARIANT*: `nvidia-apex` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,cuda + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,generic-python-lib,_torch_cuda + * CM names: `--adr.['torch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,git,repo,_repo.https://github.com/NVIDIA/apex,_tag.23.05 + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + * `_nvidia-dali` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `nvidia-dali-cuda120` + - *CM_GENERIC_PYTHON_PIP_EXTRA*: ` --upgrade --default-timeout=900` + - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL*: `https://developer.download.nvidia.com/compute/redist` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cuda + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_nvidia-pycocotools` + - Environment variables: + - *CM_GENERIC_PYTHON_PIP_UNINSTALL_DEPS*: `pycocotools` + - *CM_GENERIC_PYTHON_PIP_URL*: `pycocotools@git+https://github.com/NVIDIA/cocoapi#subdirectory=PythonAPI` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,generic-python-lib,_package.cython + * CM names: `--adr.['cython']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.numpy + * CM names: `--adr.['numpy']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_nvidia-pyindex` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `nvidia-pyindex` + - Workflow: + * `_nvidia-tensorrt` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `nvidia-tensorrt` + - Workflow: + * `_onnx` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `onnx` + - Workflow: + * `_onnx-graphsurgeon` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `onnx_graphsurgeon` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_package.nvidia-pyindex + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_onnxruntime` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `onnxruntime` + - Workflow: + * `_onnxruntime,rocm` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `onnxruntime-training` + - *CM_GENERIC_PYTHON_PIP_URL*: `https://download.onnxruntime.ai/onnxruntime_training-1.16.0%2Brocm56-cp3<<>>-cp3<<>>-manylinux_2_17_x86_64.manylinux2014_x86_64.whl` + - Workflow: + * `_onnxruntime_gpu` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `onnxruntime_gpu` + - *CM_ONNXRUNTIME_VERSION_EXTRA*: `GPU` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,cuda + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_opencv-python` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `opencv-python` + - Workflow: + * `_package.#` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `#` + - *CM_GENERIC_PYTHON_PIP_UNINSTALL_DEPS*: `` + - *CM_GENERIC_PYTHON_PIP_URL*: `` + - Workflow: + * `_pandas` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `pandas` + - Workflow: + * `_path.#` + - Environment variables: + - *CM_GENERIC_PYTHON_PIP_URL*: `#` + - Workflow: + * `_pillow` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `Pillow` + - Workflow: + * `_pip` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `pip` + - Workflow: + * `_polygraphy` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `polygraphy` + - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL*: `https://pypi.ngc.nvidia.com` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_colored + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_pre` + - Environment variables: + - *CM_GENERIC_PYTHON_DEV_VERSION*: `yes` + - Workflow: + * `_protobuf` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `protobuf` + - Workflow: + * `_psutil` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `psutil` + - Workflow: + * `_pycocotools` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `pycocotools` + - Workflow: + * `_pycuda` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `pycuda` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,cuda + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_ray` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `ray[default]` + - Workflow: + * `_requests` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `requests` + - Workflow: + * `_rocm` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,rocm + * CM names: `--adr.['rocm']...` + - CM script: [get-rocm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-rocm) + * `_safetensors` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `safetensors` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,rust-compiler + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_HOST_PLATFORM_FLAVOR': ['x86_64']}` + - CM script: [get-compiler-rust](https://github.com/mlcommons/cm4mlops/tree/master/script/get-compiler-rust) + * `_scikit-learn` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `scikit-learn` + - Workflow: + * `_scipy` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `scipy` + - Workflow: + * `_scons` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `scons` + - Workflow: + * `_setfit` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `setfit` + - Workflow: + * `_setuptools` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `setuptools` + - Workflow: + * `_six` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `six` + - Workflow: + * `_sklearn` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `sklearn` + - Workflow: + * `_sox` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `sox` + - Workflow: + * `_sparsezoo` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `sparsezoo` + - Workflow: + * `_streamlit` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `streamlit` + - Workflow: + * `_streamlit_option_menu` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `streamlit_option_menu` + - Workflow: + * `_tensorboard` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tensorboard` + - Workflow: + * `_tensorflow` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tensorflow` + - Workflow: + * `_tensorflow,rocm` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tensorflow-rocm` + - Workflow: + * `_tensorrt` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tensorrt` + - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL*: `https://download.pytorch.org/whl/<<>>` + - *CM_TORCH_VERSION_EXTRA*: `CUDA` + - Workflow: + * `_tflite` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tflite` + - Workflow: + * 
`_tflite-runtime` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tflite-runtime` + - Workflow: + * `_tokenization` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tokenization` + - Workflow: + * `_toml` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `toml` + - Workflow: + * `_torch` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torch` + - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL*: `https://download.pytorch.org/whl/cpu` + - Workflow: + * `_torch,pre` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torch` + - *CM_GENERIC_PYTHON_PIP_EXTRA*: ` --pre` + - *CM_GENERIC_PYTHON_PIP_INDEX_URL*: `https://download.pytorch.org/whl/nightly/cpu` + - Workflow: + * `_torch,rocm` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torch` + - *CM_GENERIC_PYTHON_PIP_INDEX_URL*: `https://download.pytorch.org/whl/rocm5.6` + - *CM_GENERIC_PYTHON_PIP_UNINSTALL_DEPS*: `torch` + - Workflow: + 1. ***Read "post_deps" on other CM scripts*** + * get,generic-python-lib,_torchvision,_rocm + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchaudio,_rocm + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_torch_cuda` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torch` + - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL1*: `https://download.pytorch.org/whl/<<>>` + - *CM_TORCH_VERSION_EXTRA*: `CUDA` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,cuda + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_torch_cuda,pre` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torch` + - *CM_GENERIC_PYTHON_PIP_EXTRA*: ` --pre` + - *CM_GENERIC_PYTHON_PIP_INDEX_URL*: `https://download.pytorch.org/whl/<<>>` + - *CM_TORCH_VERSION_EXTRA*: `CUDA` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cuda + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_torch_tensorrt` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torch-tensorrt` + - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL*: `https://download.pytorch.org/whl/<<>>` + - *CM_TORCH_VERSION_EXTRA*: `CUDA` + - Workflow: + * `_torchaudio` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torchaudio` + - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL*: `https://download.pytorch.org/whl/cpu` + - Workflow: + * `_torchaudio,rocm` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torchaudio` + - *CM_GENERIC_PYTHON_PIP_INDEX_URL*: `https://download.pytorch.org/whl/rocm5.6` + - *CM_GENERIC_PYTHON_PIP_UNINSTALL_DEPS*: `torchaudio` + - Workflow: + * `_torchaudio_cuda` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torchaudio` + - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL1*: `https://download.pytorch.org/whl/<<>>` + - *CM_TORCHAUDIO_VERSION_EXTRA*: `CUDA` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,cuda + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_torchvision` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torchvision` + - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL*: `https://download.pytorch.org/whl/cpu` + - Workflow: + * `_torchvision,rocm` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torchvision` + - *CM_GENERIC_PYTHON_PIP_INDEX_URL*: `https://download.pytorch.org/whl/rocm5.6` + - *CM_GENERIC_PYTHON_PIP_UNINSTALL_DEPS*: `torchvision` + - Workflow: + * `_torchvision_cuda` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torchvision` + - *CM_TORCHVISION_VERSION_EXTRA*: `CUDA` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cuda + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * `_tornado` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tornado` + - Workflow: + * `_tqdm` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tqdm` + - Workflow: + * `_transformers` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `transformers` + - Workflow: + * `_typing_extensions` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `typing_extensions` + - Workflow: + * `_ujson` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `ujson` + - Workflow: + * `_unidecode` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `unidecode` + - Workflow: + * `_url.#` + - Environment variables: + - *CM_GENERIC_PYTHON_PIP_URL*: `#` + - *CM_TMP_PYTHON_PACKAGE_FORCE_INSTALL*: `yes` + - Workflow: + * `_wandb` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `wandb` + - Workflow: + * `_west` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `west` + - Workflow: + * `_xgboost` + - Environment 
variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `xgboost` + - Workflow: + * `_xlsxwriter` + - Environment variables: + - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `xlsxwriter` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--extra_index_url=value` → `CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL=value` +* `--force_install=value` → `CM_TMP_PYTHON_PACKAGE_FORCE_INSTALL=value` +* `--index_url=value` → `CM_GENERIC_PYTHON_PIP_INDEX_URL=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "extra_index_url":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,python3 + * Skip this dependency only if all ENV vars are set:
+`{'CM_TMP_USE_CUSTOM_PYTHON': ['on']}` + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,generic-python-lib,_pip + * Skip this dependency only if all ENV vars are set:
+`{'CM_GENERIC_PYTHON_PACKAGE_NAME': ['pip']}` + * CM names: `--adr.['python-pip', 'pip']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib/_cm.json)*** + * install,onnxruntime,from.src,_cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_INSTALL_ONNXRUNTIME_GPU_FROM_SRC': ['yes']}` + - CM script: [install-onnxruntime-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-onnxruntime-from-src) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib/_cm.json) + +___ +### Script output +`cmr "get generic-python-lib [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_PYTHONLIB_*` +#### New environment keys auto-detected from customize diff --git a/docs/Python-automation/get-python3.md b/docs/Python-automation/get-python3.md new file mode 100644 index 000000000..2a011ed3c --- /dev/null +++ b/docs/Python-automation/get-python3.md @@ -0,0 +1,169 @@ +Automatically generated README for this automation recipe: **get-python3** + +Category: **Python automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-python3,d0b5dd74373f4a62) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,python,python3,get-python,get-python3* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get python python3 get-python get-python3" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,python,python3,get-python,get-python3` + +`cm run script --tags=get,python,python3,get-python,get-python3[,variations] ` + +*or* + +`cmr "get python python3 get-python get-python3"` + +`cmr "get python python3 get-python get-python3 [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,python,python3,get-python,get-python3', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,python,python3,get-python,get-python3"``` + +#### Run this script via Docker (beta) + +`cm docker script "get python python3 get-python get-python3[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_conda.#` + - Environment variables: + - *CM_PYTHON_CONDA*: `yes` + - *CM_PYTHON_INSTALL_CACHE_TAGS*: `_conda.#` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic,conda-package,_name.#,_package.python + * CM names: `--adr.['conda-package', 'conda-python']...` + - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) + * `_custom-path.#` + - Environment variables: + - *CM_PYTHON_BIN_WITH_PATH*: `#` + - Workflow: + * `_lto` + - Workflow: + * `_optimized` + - Workflow: + * `_shared` + - Workflow: + * `_with-custom-ssl` + - Workflow: + * `_with-ssl` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3/_cm.json)*** + * install,python,src + * Enable this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + - CM script: [install-python-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-python-src) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3/_cm.json) + +___ +### Script output +`cmr "get python python3 get-python get-python3 [,variations]" -j` +#### New environment keys (filter) + +* `+C_INCLUDE_PATH` +* `+LD_LIBRARY_PATH` +* `+PATH` +* `CM_PYTHON_*` +#### New environment keys auto-detected from customize + +* `CM_PYTHON_BIN` +* `CM_PYTHON_BIN_PATH` +* `CM_PYTHON_BIN_WITH_PATH` +* `CM_PYTHON_CACHE_TAGS` +* `CM_PYTHON_MAJOR_VERSION` +* `CM_PYTHON_MINOR_VERSION` +* `CM_PYTHON_PATCH_VERSION` \ No newline at end of file diff --git a/docs/Python-automation/install-generic-conda-package.md b/docs/Python-automation/install-generic-conda-package.md new file mode 100644 index 000000000..6743ef900 --- /dev/null +++ b/docs/Python-automation/install-generic-conda-package.md @@ -0,0 +1,158 @@ +Automatically generated README for this automation recipe: **install-generic-conda-package** + +Category: **Python automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-generic-conda-package,d9275487f5314195) ]* + +--- +#### Summary + +* 
CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-generic-conda-package)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,install,generic,generic-conda-lib,conda-lib,conda-package,generic-conda-package* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get install generic generic-conda-lib conda-lib conda-package generic-conda-package" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,install,generic,generic-conda-lib,conda-lib,conda-package,generic-conda-package` + +`cm run script --tags=get,install,generic,generic-conda-lib,conda-lib,conda-package,generic-conda-package[,variations] ` + +*or* + +`cmr "get install generic generic-conda-lib conda-lib conda-package generic-conda-package"` + +`cmr "get install generic generic-conda-lib conda-lib conda-package generic-conda-package [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,install,generic,generic-conda-lib,conda-lib,conda-package,generic-conda-package', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,install,generic,generic-conda-lib,conda-lib,conda-package,generic-conda-package"``` + +#### Run this script via Docker (beta) + +`cm docker script "get install generic generic-conda-lib conda-lib conda-package generic-conda-package[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_name.#` + - Workflow: + * `_package.#` + - Environment variables: + - *CM_CONDA_PKG_NAME*: `#` + - Workflow: + +
+ + + * Group "**package-source**" +
+ Click here to expand this section. + + * `_source.#` + - Environment variables: + - *CM_CONDA_PKG_SRC*: `#` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-generic-conda-package/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,conda + * CM names: `--adr.['conda']...` + - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) + * get,conda + * CM names: `--adr.['conda']...` + - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-generic-conda-package/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-generic-conda-package/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-generic-conda-package/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-generic-conda-package/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-generic-conda-package/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-generic-conda-package/_cm.json) + +___ +### Script output +`cmr "get install generic generic-conda-lib conda-lib conda-package generic-conda-package [,variations]" -j` +#### New environment keys (filter) + +* `CM_PYTHONLIB_*` +#### New environment keys auto-detected from customize diff --git a/docs/Python-automation/install-python-src.md b/docs/Python-automation/install-python-src.md new file mode 100644 index 000000000..1fd8e9eae --- /dev/null +++ b/docs/Python-automation/install-python-src.md @@ -0,0 +1,182 @@ +Automatically generated README for this automation recipe: **install-python-src** + +Category: **Python automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-python-src,12d3a608afe14a1e) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,src,python,python3,src-python3,src-python* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install src python python3 src-python3 src-python" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,src,python,python3,src-python3,src-python` + +`cm run script --tags=install,src,python,python3,src-python3,src-python[,variations] ` + +*or* + +`cmr "install src python python3 src-python3 src-python"` + +`cmr "install src python python3 src-python3 src-python [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'install,src,python,python3,src-python3,src-python', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,src,python,python3,src-python3,src-python"``` + +#### Run this script via Docker (beta) + +`cm docker script "install src python python3 src-python3 src-python[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_lto` + - Environment variables: + - *CM_PYTHON_LTO_FLAG*: ` --lto` + - *CM_PYTHON_INSTALL_CACHE_TAGS*: `with-lto` + - Workflow: + * `_optimized` + - Environment variables: + - *CM_PYTHON_OPTIMIZATION_FLAG*: ` --enable-optimizations` + - *CM_PYTHON_INSTALL_CACHE_TAGS*: `optimized` + - Workflow: + * `_shared` + - Environment variables: + - *CM_PYTHON_INSTALL_CACHE_TAGS*: `shared` + - *CM_SHARED_BUILD*: `yes` + - Workflow: + * `_with-custom-ssl` + - Environment variables: + - *CM_CUSTOM_SSL*: `yes` + - *CM_PYTHON_INSTALL_CACHE_TAGS*: `with-custom-ssl` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,openssl + - CM script: [get-openssl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-openssl) + * `_with-ssl` + - Environment variables: + - *CM_ENABLE_SSL*: `yes` + - *CM_PYTHON_INSTALL_CACHE_TAGS*: `with-ssl` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_ENABLE_SSL: `no` +* CM_CUSTOM_SSL: `no` +* CM_SHARED_BUILD: `no` +* CM_PYTHON_OPTIMIZATION_FLAG: `` +* CM_PYTHON_LTO_FLAG: `` +* CM_WGET_URL: `https://www.python.org/ftp/python/[PYTHON_VERSION]/Python-[PYTHON_VERSION].tgz` + +
+ +#### Versions
+Default version: `3.10.13` + +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-src/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-src/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-src/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-src/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-src/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-src/_cm.json)*** + * get,python3 + * Skip this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_INSTALL': ['yes']}` + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + +___ +### Script output +`cmr "install src python python3 src-python3 src-python [,variations]" -j` +#### New environment keys (filter) + +* `+C_INCLUDE_PATH` +* `+LD_LIBRARY_PATH` +* `+PATH` +* `CM_PYTHON_BIN_WITH_PATH` +* `CM_PYTHON_INSTALL_PATH` +#### New environment keys auto-detected from customize + +* `CM_PYTHON_BIN_WITH_PATH` \ No newline at end of file diff --git a/docs/Python-automation/install-python-venv.md b/docs/Python-automation/install-python-venv.md new file mode 100644 index 000000000..8b269d741 --- /dev/null +++ b/docs/Python-automation/install-python-venv.md @@ -0,0 +1,152 @@ +Automatically generated README for this automation recipe: **install-python-venv** + +Category: **Python automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-python-venv,7633ebada4584c6c) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *install,python,get-python-venv,python-venv* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "install python get-python-venv python-venv" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=install,python,get-python-venv,python-venv` + +`cm run script --tags=install,python,get-python-venv,python-venv[,variations] ` + +*or* + +`cmr "install python get-python-venv python-venv"` + +`cmr "install python get-python-venv python-venv [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'install,python,get-python-venv,python-venv', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="install,python,get-python-venv,python-venv"``` + +#### Run this script via Docker (beta) + +`cm docker script "install python get-python-venv python-venv[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_lto` + - Workflow: + * `_optimized` + - Workflow: + * `_shared` + - Workflow: + * `_with-custom-ssl` + - Workflow: + * `_with-ssl` + - Workflow: + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv/_cm.json)*** + * get,python,-virtual + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv/customize.py)*** + 1. 
***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv/_cm.json)*** + * get,python3 + * CM names: `--adr.['register-python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + +___ +### Script output +`cmr "install python get-python-venv python-venv [,variations]" -j` +#### New environment keys (filter) + +* `CM_PYTHON_BIN_WITH_PATH` +* `CM_VIRTUAL_ENV_*` +#### New environment keys auto-detected from customize + +* `CM_PYTHON_BIN_WITH_PATH` +* `CM_VIRTUAL_ENV_DIR` +* `CM_VIRTUAL_ENV_PATH` +* `CM_VIRTUAL_ENV_SCRIPTS_PATH` \ No newline at end of file diff --git a/docs/Remote-automation/remote-run-commands.md b/docs/Remote-automation/remote-run-commands.md new file mode 100644 index 000000000..7782b7f6f --- /dev/null +++ b/docs/Remote-automation/remote-run-commands.md @@ -0,0 +1,145 @@ +Automatically generated README for this automation recipe: **remote-run-commands** + +Category: **Remote automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=remote-run-commands,b71e24b03c9d49cd) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *remote,run,cmds,remote-run,remote-run-cmds,ssh-run,ssh* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "remote run cmds remote-run remote-run-cmds ssh-run ssh" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=remote,run,cmds,remote-run,remote-run-cmds,ssh-run,ssh` + +`cm run script --tags=remote,run,cmds,remote-run,remote-run-cmds,ssh-run,ssh [--input_flags]` + +*or* + +`cmr "remote run cmds remote-run remote-run-cmds ssh-run ssh"` + +`cmr "remote run cmds remote-run remote-run-cmds ssh-run ssh " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'remote,run,cmds,remote-run,remote-run-cmds,ssh-run,ssh', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="remote,run,cmds,remote-run,remote-run-cmds,ssh-run,ssh"``` + +#### Run this script via Docker (beta) + +`cm docker script "remote run cmds remote-run remote-run-cmds ssh-run ssh" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--client_refresh=value` → `CM_SSH_CLIENT_REFRESH=value` +* `--host=value` → `CM_SSH_HOST=value` +* `--password=value` → `CM_SSH_PASSWORD=value` +* `--port=value` → `CM_SSH_PORT=value` +* `--run_cmds=value` → `CM_SSH_RUN_COMMANDS=value` +* `--skip_host_verify=value` → `CM_SSH_SKIP_HOST_VERIFY=value` +* `--ssh_key_file=value` → `CM_SSH_KEY_FILE=value` +* `--user=value` → `CM_SSH_USER=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "client_refresh":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_SSH_PORT: `22` +* CM_SSH_HOST: `localhost` +* CM_SSH_USER: `$USER` +* CM_SSH_CLIENT_REFRESH: `10` +* CM_SSH_KEY_FILE: `$HOME/.ssh/id_rsa` + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands/_cm.json) + +___ +### Script output +`cmr "remote run cmds remote-run remote-run-cmds ssh-run ssh " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Reproduce-MLPerf-benchmarks/app-mlperf-inference-nvidia.md b/docs/Reproduce-MLPerf-benchmarks/app-mlperf-inference-nvidia.md new file mode 100644 index 000000000..c7f83ff09 --- /dev/null +++ b/docs/Reproduce-MLPerf-benchmarks/app-mlperf-inference-nvidia.md @@ -0,0 +1,1333 @@ +Automatically generated README for this automation recipe: **app-mlperf-inference-nvidia** + +Category: **Reproduce MLPerf benchmarks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-inference-nvidia,bc3b17fb430f4732) ]* + +--- + +This script is a CM wrapper to the official [Nvidia submission code](https://github.com/mlcommons/inference_results_v3.0/tree/master/closed/NVIDIA) used for MLPerf inference submissions. + + + +## Download the needed files + +* Please ask privately in [this discord channel](https://discord.gg/y7hupJsUNb) if you would like to get access to an Amazon S3 bucket containing all the needed files for easiness. Otherwise, you can download them from the below links. + +For x86 machines, please download the latest install tar files from the below sites +1. [cuDNN](https://developer.nvidia.com/cudnn) (for cuda 11) +2. [TensorRT](https://developer.nvidia.com/tensorrt) +3. Imagenet validation set (unfortunately not available via public URL) following the instructions given [here](https://github.com/mlcommons/ck/blob/master/cm-mlops/script/get-dataset-imagenet-val/README-extra.md) + +
+ + + +## Using Docker (Recommended on x86 systems) + + +Assuming all the downloaded files are to the user home directory please do the following steps: + +1. Download CUDA 11.8 + ``` + wget https://developer.download.nvidia.com/compute/cuda/11.8.0/local_installers/cuda_11.8.0_520.61.05_linux.run + ``` +2. [Install docker](https://docs.docker.com/engine/install/) and [Nvidia container toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html) + +3. Give docker permission to the current user + ``` + sudo usermod -aG docker $USER + ``` + Logout and login + Restart docker if required and confirm that Nvidia container toolkit is working by + ``` + nvidia-ctk --version + ``` +4. Check if Nvidia driver is working properly on the host. + ``` + nvidia-smi + ``` + If the above command produces any error you'll need to install Nvidia drivers on the host. You can do this via CM if you have sudo access + ``` + cmr "install cuda prebuilt _driver" --version=11.8.0 + ``` +5. Build the docker container and mount the paths from the host machine. 
+ ** You may want to change the `scratch_path` location as it can take 100s of GBs.** + ```bash + cm docker script --tags=build,nvidia,inference,server \ + --cuda_run_file_path=$HOME/cuda_11.8.0_520.61.05_linux.run \ + --tensorrt_tar_file_path=$HOME/TensorRT-8.6.1.6.Linux.x86_64-gnu.cuda-11.8.tar.gz \ + --cudnn_tar_file_path=$HOME/cudnn-linux-x86_64-8.9.2.26_cuda11-archive.tar.xz \ + --imagenet_path=$HOME/imagenet-2012-val \ + --scratch_path=$HOME/mlperf_scratch \ + --docker_cm_repo=mlcommons@cm4mlops \ + --results_dir=$HOME/results_dir \ + --submission_dir=$HOME/submission_dir \ + --adr.compiler.tags=gcc + ``` + * Use `--docker_cache=no` to turn off docker caching + * Use `--docker_run_cmd_prefix="cm pull repo mlcommons@cm4mlops"` to update the CK repository when docker caching is used + * Use `--custom_system=no` if you are using a similar system to the [Nvidia submission systems for MLPerf inference 3.0](https://github.com/mlcommons/inference_results_v3.0/tree/main/closed/NVIDIA/systems). + +6. At the end of the build you'll get the following prompt unless you have chosen `--custom_system=no`. Please give a system name and say yes to generating the configuration files + ### Example output + ``` + ============================================ + => A system ID is a string containing only letters, numbers, and underscores + => that is used as the human-readable name of the system. It is also used as + => the system name when creating the measurements/ and results/ entries. + => This string should also start with a letter to be a valid Python enum member name. + => Specify the system ID to use for the current system: phoenix + => Reloaded system list. MATCHED_SYSTEM: KnownSystem.phoenix + => This script will generate Benchmark Configuration stubs for the detected system. + Continue? [y/n]: y + ``` + Now you'll be inside the CM Nvidia docker container and can run further scripts. + +7. 
Once the build is complete, you can proceed with any further CM scripts like for MLPerf inference. You can also save the container at this stage using [docker commit](https://docs.docker.com/engine/reference/commandline/commit/) so that it can be launched later without having to go through the previous steps. + +
+ +
+ + + +## Without Docker + + +1. Install CUDA + If CUDA is not detected, CM should download and install it automatically when you run the workflow. + ** Nvidia drivers are expected to be installed on the system ** + +2. Install cuDNN + ```bash + cmr "get cudnn" --tar_file= + ``` +3. Install TensorRT + ```bash + cmr "get tensorrt _dev" --tar_file= + ``` + On non x86 systems like Nvidia Orin, you can do a package manager install and then CM should pick up the installation automatically during the workflow run. + +4. Build the Nvidia inference server + ``` + cmr "build nvidia inference server" \ + --adr.install-cuda-prebuilt.local_run_file_path=/data/cuda_11.8.0_520.61.05_linux.run \ + --adr.tensorrt.tar_file=/data/TensorRT-8.6.1.6.Linux.x86_64-gnu.cuda-11.8.tar.gz \ + --adr.cudnn.tar_file=/data/cudnn-linux-x86_64-8.9.2.26_cuda11-archive.tar.xz \ + --adr.compiler.tags=gcc \ + [--custom_system=no] + ``` + Use `--custom_system=no` if you are using a similar system to the [Nvidia submission systems for MLPerf inference 3.0](https://github.com/mlcommons/inference_results_v3.0/tree/main/closed/NVIDIA/systems). + +5. At the end of the build you'll get the following prompt unless you have chosen `--custom_system=no`. Please give a system name and say yes to generating the configuration files + + ### Example output + ``` + ============================================ + => A system ID is a string containing only letters, numbers, and underscores + => that is used as the human-readable name of the system. It is also used as + => the system name when creating the measurements/ and results/ entries. + => This string should also start with a letter to be a valid Python enum member name. + => Specify the system ID to use for the current system: phoenix + => Reloaded system list. MATCHED_SYSTEM: KnownSystem.phoenix + => This script will generate Benchmark Configuration stubs for the detected system. + Continue? [y/n]: y + ``` +
+ + +## Acknowledgments + +* A common CM interface and automation for MLPerf inference benchmarks was developed by Arjun Suresh and Grigori Fursin + sponsored by the [cTuning foundation](https://cTuning.org) and [cKnowledge.org](https://cKnowledge.org). +* Nvidia's MLPerf inference implementation was developed by Zhihan Jiang, Ethan Cheng, Yiheng Zhang and Jinho Suh. + + + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-nvidia)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *reproduce,mlcommons,mlperf,inference,harness,nvidia-harness,nvidia* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "reproduce mlcommons mlperf inference harness nvidia-harness nvidia" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=reproduce,mlcommons,mlperf,inference,harness,nvidia-harness,nvidia` + +`cm run script --tags=reproduce,mlcommons,mlperf,inference,harness,nvidia-harness,nvidia[,variations] [--input_flags]` + +*or* + +`cmr "reproduce mlcommons mlperf inference harness nvidia-harness nvidia"` + +`cmr "reproduce mlcommons mlperf inference harness nvidia-harness nvidia [variations]" [--input_flags]` + + +* *See the list of 
`variations` [here](#variations) and check the [Getting Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.*
+
+#### Run this script from Python
+
+<details>
+<summary>Click here to expand this section.</summary>
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'reproduce,mlcommons,mlperf,inference,harness,nvidia-harness,nvidia',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+   print (r['error'])
+
+```
+
+</details>
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="reproduce,mlcommons,mlperf,inference,harness,nvidia-harness,nvidia"``` + +#### Run this script via Docker (beta) + +`cm docker script "reproduce mlcommons mlperf inference harness nvidia-harness nvidia[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *Internal group (variations should not be selected manually)* +
+ Click here to expand this section. + + * `_3d-unet_` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_transformers + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.nibabel + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pandas + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_bert_` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_transformers + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_safetensors + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnx + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_dlrm_` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,generic-python-lib,_torch + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.torchsnapshot + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.torchrec + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.fbgemm-gpu + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnx-graphsurgeon + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.scikit-learn + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_gptj_` + - Environment variables: + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://cloud.mlcommons.org/index.php/s/QAZ2oM94MkFtbQx/download` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_package.datasets + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.simplejson + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + +
+ + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_a100,sxm,3d-unet_,offline,run_harness` + - Workflow: + * `_a100,sxm,bert_,offline,run_harness` + - Workflow: + * `_a100,sxm,dlrm_,offline,run_harness` + - Workflow: + * `_a100,sxm,resnet50,offline,run_harness` + - Environment variables: + - *CM_MLPERF_PERFORMANCE_SAMPLE_COUNT*: `2048` + - Workflow: + * `_a100,sxm,retinanet,offline,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_WORKSPACE_SIZE*: `300000000000` + - Workflow: + * `_a100,sxm,rnnt,offline,run_harness` + - Workflow: + * `_gptj_,build` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * install,pytorch,from.src,_for-nvidia-mlperf-inference-v3.1 + - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src) + * get,cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * `_gptj_,build_engine` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * install,pytorch,from.src,_for-nvidia-mlperf-inference-v3.1 + - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src) + * get,cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * `_gptj_,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_USE_FP8*: `True` + - *CM_MLPERF_NVIDIA_HARNESS_ENABLE_SORT*: `True` + - *CM_MLPERF_NVIDIA_HARNESS_NUM_SORT_SEGMENTS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_SKIP_POSTPROCESS*: `True` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * install,pytorch,from.src,_for-nvidia-mlperf-inference-v3.1 + - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src) + * get,cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * `_gpu_memory.16,3d-unet_,offline,run_harness` + - Workflow: + * `_gpu_memory.16,bert_,offline,run_harness` + - Workflow: + * `_gpu_memory.16,dlrm_,offline,run_harness` + - Workflow: + * `_gpu_memory.16,gptj_,offline,run_harness` + - Workflow: + * `_gpu_memory.16,resnet50,offline,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `4` + - Workflow: + * `_gpu_memory.16,retinanet,offline,run_harness` + - Workflow: + * `_gpu_memory.16,rnnt,offline,run_harness` + - Workflow: + * `_gpu_memory.24,3d-unet_,offline,run_harness` + - Workflow: + * `_gpu_memory.24,bert_,offline,run_harness` + - Workflow: + * `_gpu_memory.24,dlrm_,offline,run_harness` + - Workflow: + * `_gpu_memory.24,gptj_,offline,run_harness` + - Workflow: + * `_gpu_memory.24,resnet50,offline,run_harness` + - Workflow: + * `_gpu_memory.24,retinanet,offline,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` + - Workflow: + * `_gpu_memory.24,rnnt,offline,run_harness` + - Workflow: + * `_gpu_memory.32,3d-unet_,offline,run_harness` + - Workflow: + * `_gpu_memory.32,bert_,offline,run_harness` + - Workflow: + * `_gpu_memory.32,dlrm_,offline,run_harness` + - Workflow: + * `_gpu_memory.32,gptj_,offline,run_harness` + - Workflow: + * `_gpu_memory.32,resnet50,offline,run_harness` + - Workflow: + * `_gpu_memory.32,retinanet,offline,run_harness` + - Workflow: + * `_gpu_memory.32,rnnt,offline,run_harness` + - Workflow: + * `_gpu_memory.40,3d-unet_,offline,run_harness` + - Workflow: + * `_gpu_memory.40,bert_,offline,run_harness` + - Workflow: + * 
`_gpu_memory.40,dlrm_,offline,run_harness` + - Workflow: + * `_gpu_memory.40,gptj_,offline,run_harness` + - Workflow: + * `_gpu_memory.40,resnet50,offline,run_harness` + - Workflow: + * `_gpu_memory.40,retinanet,offline,run_harness` + - Workflow: + * `_gpu_memory.40,rnnt,offline,run_harness` + - Workflow: + * `_gpu_memory.48,3d-unet_,offline,run_harness` + - Workflow: + * `_gpu_memory.48,bert_,offline,run_harness` + - Workflow: + * `_gpu_memory.48,dlrm_,offline,run_harness` + - Workflow: + * `_gpu_memory.48,gptj_,offline,run_harness` + - Workflow: + * `_gpu_memory.48,resnet50,offline,run_harness` + - Workflow: + * `_gpu_memory.48,retinanet,offline,run_harness` + - Workflow: + * `_gpu_memory.48,rnnt,offline,run_harness` + - Workflow: + * `_gpu_memory.80,3d-unet_,offline,run_harness` + - Workflow: + * `_gpu_memory.80,bert_,server,run_harness` + - Workflow: + * `_gpu_memory.80,dlrm_,offline,run_harness` + - Workflow: + * `_gpu_memory.80,gptj_,offline,run_harness` + - Workflow: + * `_gpu_memory.80,resnet50,offline,run_harness` + - Workflow: + * `_gpu_memory.80,retinanet,offline,run_harness` + - Workflow: + * `_gpu_memory.80,rnnt,offline,run_harness` + - Workflow: + * `_l4,3d-unet_,offline,run_harness` + - Workflow: + * `_l4,bert_,offline,run_harness` + - Workflow: + * `_l4,bert_,server,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GRAPHS_MAX_SEQLEN*: `200` + - *CM_MLPERF_NVIDIA_HARNESS_SERVER_NUM_ISSUE_QUERY_THREADS*: `1` + - *CM_MLPERF_NVIDIA_HARNESS_SOFT_DROP*: `1.0` + - *CM_MLPERF_NVIDIA_HARNESS_USE_SMALL_TILE_GEMM_PLUGIN*: `True` + - Workflow: + * `_l4,dlrm_,offline,run_harness` + - Workflow: + * `_l4,resnet50` + - Workflow: + * `_l4,resnet50,offline,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `1` + - *CM_MLPERF_NVIDIA_HARNESS_USE_GRAPHS*: `True` + - Workflow: + * `_l4,resnet50,server,run_harness` + - Environment variables: + - 
*CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `9` + - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_USE_GRAPHS*: `True` + - *CM_MLPERF_NVIDIA_HARNESS_USE_DEQUE_LIMIT*: `True` + - *CM_MLPERF_NVIDIA_HARNESS_DEQUE_TIMEOUT_USEC*: `2000` + - *CM_MLPERF_NVIDIA_HARNESS_USE_CUDA_THREAD_PER_DEVICE*: `True` + - Workflow: + * `_l4,retinanet,offline,run_harness` + - Workflow: + * `_l4,retinanet,server,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_USE_DEQUE_LIMIT*: `True` + - *CM_MLPERF_NVIDIA_HARNESS_DEQUE_TIMEOUT_USEC*: `30000` + - *CM_MLPERF_NVIDIA_HARNESS_WORKSPACE_SIZE*: `20000000000` + - Workflow: + * `_l4,rnnt,offline,run_harness` + - Workflow: + * `_l4,rnnt,server,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_AUDIO_BATCH_SIZE*: `64` + - *CM_MLPERF_NVIDIA_HARNESS_AUDIO_BUFFER_NUM_LINES*: `1024` + - *CM_MLPERF_NVIDIA_HARNESS_NUM_WARMUPS*: `1024` + - Workflow: + * `_multistream,resnet50` + - Environment variables: + - *SKIP_POLICIES*: `1` + - Workflow: + * `_orin,rnnt,singlestream,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_NUM_WARMUPS*: `1` + - Workflow: + * `_resnet50,multistream,run_harness,num-gpus.1` + - Workflow: + * `_resnet50,multistream,run_harness,num-gpus.2` + - Workflow: + * `_resnet50,server,run_harness` + - Workflow: + * `_retinanet,multistream,run_harness` + - Workflow: + * `_retinanet,server,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` + - Workflow: + * `_rtx_4090,3d-unet_,offline,run_harness` + - Workflow: + * `_rtx_4090,3d-unet_,server,run_harness` + - Workflow: + * `_rtx_4090,bert_,offline,run_harness` + - Workflow: + * `_rtx_4090,bert_,server,run_harness` + - Workflow: + * `_rtx_4090,dlrm_,offline,run_harness` + - Environment 
variables: + - *CM_MLPERF_NVIDIA_HARNESS_EMBEDDING_WEIGHTS_ON_GPU_PART*: `0.30` + - Workflow: + * `_rtx_4090,gptj_,offline,run_harness` + - Workflow: + * `_rtx_4090,gptj_,server,run_harness` + - Workflow: + * `_rtx_4090,resnet50,offline,run_harness` + - Workflow: + * `_rtx_4090,resnet50,server,run_harness` + - Workflow: + * `_rtx_4090,retinanet,offline,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` + - Workflow: + * `_rtx_4090,retinanet,server,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` + - Workflow: + * `_rtx_4090,rnnt,offline,run_harness` + - Workflow: + * `_rtx_4090,rnnt,server,run_harness` + - Workflow: + * `_rtx_6000_ada,3d-unet_,offline,run_harness` + - Workflow: + * `_rtx_6000_ada,3d-unet_,server,run_harness` + - Workflow: + * `_rtx_6000_ada,bert_,offline,run_harness` + - Workflow: + * `_rtx_6000_ada,bert_,server,run_harness` + - Workflow: + * `_rtx_6000_ada,dlrm_,offline,run_harness` + - Workflow: + * `_rtx_6000_ada,resnet50,offline,run_harness` + - Workflow: + * `_rtx_6000_ada,resnet50,server,run_harness` + - Workflow: + * `_rtx_6000_ada,retinanet,offline,run_harness` + - Workflow: + * `_rtx_6000_ada,retinanet,server,run_harness` + - Workflow: + * `_rtx_6000_ada,rnnt,offline,run_harness` + - Workflow: + * `_rtx_6000_ada,rnnt,server,run_harness` + - Workflow: + * `_rtx_a6000,3d-unet_,offline,run_harness` + - Workflow: + * `_rtx_a6000,3d-unet_,server,run_harness` + - Workflow: + * `_rtx_a6000,bert_,offline,run_harness` + - Workflow: + * `_rtx_a6000,bert_,server,run_harness` + - Workflow: + * `_rtx_a6000,dlrm_,offline,run_harness` + - Workflow: + * `_rtx_a6000,resnet50,offline,run_harness` + - Workflow: + * `_rtx_a6000,resnet50,server,run_harness` + - Workflow: + * `_rtx_a6000,retinanet,offline,run_harness` + - Workflow: + * 
`_rtx_a6000,retinanet,server,run_harness` + - Workflow: + * `_rtx_a6000,rnnt,offline,run_harness` + - Workflow: + * `_rtx_a6000,rnnt,server,run_harness` + - Workflow: + * `_run-harness` + - Workflow: + * `_singlestream,resnet50` + - Environment variables: + - *SKIP_POLICIES*: `1` + - Workflow: + * `_singlestream,run_harness` + - Workflow: + * `_t4,3d-unet_,offline,run_harness` + - Workflow: + * `_t4,bert_,offline,run_harness` + - Workflow: + * `_t4,bert_,server,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GRAPHS_MAX_SEQLEN*: `240` + - *CM_MLPERF_NVIDIA_HARNESS_SERVER_NUM_ISSUE_QUERY_THREADS*: `0` + - *CM_MLPERF_NVIDIA_HARNESS_USE_SMALL_TILE_GEMM_PLUGIN*: `no` + - Workflow: + * `_t4,dlrm_,offline,run_harness` + - Workflow: + * `_t4,resnet50` + - Workflow: + * `_t4,resnet50,offline,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `4` + - Workflow: + * `_t4,resnet50,server,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `4` + - *CM_MLPERF_NVIDIA_HARNESS_USE_DEQUE_LIMIT*: `True` + - *CM_MLPERF_NVIDIA_HARNESS_DEQUE_TIMEOUT_USEC*: `2000` + - *CM_MLPERF_NVIDIA_HARNESS_SOFT_DROP*: `0.993` + - Workflow: + * `_t4,retinanet,offline,run_harness` + - Workflow: + * `_t4,retinanet,server,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `2` + - *CM_MLPERF_NVIDIA_HARNESS_USE_DEQUE_LIMIT*: `True` + - *CM_MLPERF_NVIDIA_HARNESS_DEQUE_TIMEOUT_USEC*: `20000` + - *CM_MLPERF_NVIDIA_HARNESS_WORKSPACE_SIZE*: `20000000000` + - Workflow: + * `_t4,rnnt,offline,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `4` + - *CM_MLPERF_NVIDIA_HARNESS_USE_GRAPHS*: `True` + - *CM_MLPERF_NVIDIA_HARNESS_AUDIO_BATCH_SIZE*: `128` + - *CM_MLPERF_NVIDIA_HARNESS_DISABLE_ENCODER_PLUGIN*: `True` + - Workflow: + * 
`_t4,rnnt,server,run_harness` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `4` + - *CM_MLPERF_NVIDIA_HARNESS_USE_GRAPHS*: `True` + - *CM_MLPERF_NVIDIA_HARNESS_AUDIO_BATCH_SIZE*: `128` + - *CM_MLPERF_NVIDIA_HARNESS_DISABLE_ENCODER_PLUGIN*: `True` + - Workflow: + +
+ + + * Group "**backend**" +
+ Click here to expand this section. + + * **`_tensorrt`** (default) + - Environment variables: + - *CM_MLPERF_BACKEND*: `tensorrt` + - *CM_MLPERF_BACKEND_NAME*: `TensorRT` + - Workflow: + +
+ + + * Group "**batch-size**" +
+ Click here to expand this section. + + * `_batch_size.#` + - Environment variables: + - *CM_MODEL_BATCH_SIZE*: `#` + - *CM_MLPERF_NVIDIA_HARNESS_GPU_BATCH_SIZE*: `#` + - Workflow: + +
+ + + * Group "**build-engine-options**" +
+ Click here to expand this section. + + * `_build_engine_options.#` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_EXTRA_BUILD_ENGINE_OPTIONS*: `#` + - Workflow: + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * `_cpu` + - Environment variables: + - *CM_MLPERF_DEVICE*: `cpu` + - Workflow: + * **`_cuda`** (default) + - Environment variables: + - *CM_MLPERF_DEVICE*: `gpu` + - *CM_MLPERF_DEVICE_LIB_NAMESPEC*: `cudart` + - Workflow: + +
+ + + * Group "**device-memory**" +
+ Click here to expand this section. + + * `_gpu_memory.16` + - Environment variables: + - *CM_NVIDIA_GPU_MEMORY*: `16` + - Workflow: + * `_gpu_memory.24` + - Environment variables: + - *CM_NVIDIA_GPU_MEMORY*: `24` + - Workflow: + * `_gpu_memory.32` + - Environment variables: + - *CM_NVIDIA_GPU_MEMORY*: `32` + - Workflow: + * `_gpu_memory.40` + - Environment variables: + - *CM_NVIDIA_GPU_MEMORY*: `40` + - Workflow: + * `_gpu_memory.48` + - Environment variables: + - *CM_NVIDIA_GPU_MEMORY*: `48` + - Workflow: + * `_gpu_memory.8` + - Environment variables: + - *CM_NVIDIA_GPU_MEMORY*: `8` + - Workflow: + * `_gpu_memory.80` + - Environment variables: + - *CM_NVIDIA_GPU_MEMORY*: `80` + - Workflow: + +
+ + + * Group "**dla-batch-size**" +
+ Click here to expand this section. + + * `_dla_batch_size.#` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_DLA_BATCH_SIZE*: `#` + - *CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX2*: `dla_batch_size.#` + - Workflow: + +
+ + + * Group "**gpu-connection**" +
+ Click here to expand this section. + + * `_pcie` + - Workflow: + * `_sxm` + - Workflow: + +
+ + + * Group "**gpu-name**" +
+ Click here to expand this section. + + * `_a100` + - Environment variables: + - *CM_NVIDIA_CUSTOM_GPU*: `yes` + - Workflow: + * `_a6000` + - Environment variables: + - *CM_NVIDIA_CUSTOM_GPU*: `yes` + - Workflow: + * `_custom` + - Environment variables: + - *CM_NVIDIA_CUSTOM_GPU*: `yes` + - *CM_MODEL_BATCH_SIZE*: `` + - *CM_MLPERF_NVIDIA_HARNESS_GPU_BATCH_SIZE*: `<<>>` + - Workflow: + * `_l4` + - Environment variables: + - *CM_NVIDIA_CUSTOM_GPU*: `yes` + - Workflow: + * `_orin` + - Environment variables: + - *CM_NVIDIA_CUSTOM_GPU*: `yes` + - *CM_MODEL_BATCH_SIZE*: `` + - *CM_MLPERF_NVIDIA_HARNESS_GPU_BATCH_SIZE*: `<<>>` + - Workflow: + * `_rtx_4090` + - Environment variables: + - *CM_NVIDIA_CUSTOM_GPU*: `yes` + - Workflow: + * `_rtx_6000_ada` + - Environment variables: + - *CM_NVIDIA_CUSTOM_GPU*: `yes` + - Workflow: + * `_t4` + - Environment variables: + - *CM_NVIDIA_CUSTOM_GPU*: `yes` + - Workflow: + +
+ + + * Group "**loadgen-scenario**" +
+ Click here to expand this section. + + * `_multistream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `MultiStream` + - Workflow: + * `_offline` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Offline` + - Workflow: + * `_server` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `Server` + - Workflow: + * `_singlestream` + - Environment variables: + - *CM_MLPERF_LOADGEN_SCENARIO*: `SingleStream` + - *CUDA_VISIBLE_DEVICES_NOT_USED*: `0` + - Workflow: + +
+ + + * Group "**model**" +
+ Click here to expand this section. + + * `_3d-unet-99` + - Environment variables: + - *CM_MODEL*: `3d-unet-99` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/5597155/files/3dunet_kits19_128x128x128.onnx` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - Workflow: + * `_3d-unet-99.9` + - Environment variables: + - *CM_MODEL*: `3d-unet-99.9` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/5597155/files/3dunet_kits19_128x128x128.onnx` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - Workflow: + * `_bert-99` + - Environment variables: + - *CM_MODEL*: `bert-99` + - *CM_NOT_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3750364/files/bert_large_v1_1_fake_quant.onnx` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int32` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - Workflow: + * `_bert-99.9` + - Environment variables: + - *CM_MODEL*: `bert-99.9` + - *CM_NOT_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3733910/files/model.onnx` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int32` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp16` + - Workflow: + * `_dlrm-v2-99` + - Environment variables: + - *CM_MODEL*: `dlrm-v2-99` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `affine fusion` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp16` + - Workflow: + * `_dlrm-v2-99.9` + - Environment variables: + - *CM_MODEL*: `dlrm-v2-99.9` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `affine fusion` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp16` + - Workflow: + * `_gptj-99` + - Environment variables: + 
- *CM_MODEL*: `gptj-99` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int32` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp16` + - Workflow: + * `_gptj-99.9` + - Environment variables: + - *CM_MODEL*: `gptj-99.9` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int32` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp16` + - Workflow: + * **`_resnet50`** (default) + - Environment variables: + - *CM_MODEL*: `resnet50` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_onnx-graphsurgeon + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.onnx + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_retinanet` + - Environment variables: + - *CM_MODEL*: `retinanet` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/6617981/files/resnext50_32x4d_fpn.pth` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,generic-python-lib,_Pillow + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchvision + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_opencv-python + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_pycocotools + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_onnx-graphsurgeon + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_package.onnx + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_rnnt` + - Environment variables: + - *CM_MODEL*: `rnnt` + - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3662521/files/DistributedDataParallel_1576581068.9962234-epoch-100.pt` + - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` + - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp16` + - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp16` + - Workflow: + 1. 
***Read "deps" on other CM scripts*** + * get,generic-python-lib,_toml + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchvision + * CM names: `--adr.['torchvision']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torch + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_nvidia-apex + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_unidecode + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_inflect + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_librosa + * CM names: `--adr.['librosa']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_sox + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-sys-util,_sox + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + +
+ + + * Group "**num-gpus**" +
+ Click here to expand this section. + + * `_num-gpus.#` + - Environment variables: + - *CM_NVIDIA_NUM_GPUS*: `#` + - Workflow: + * **`_num-gpus.1`** (default) + - Environment variables: + - *CM_NVIDIA_NUM_GPUS*: `1` + - Workflow: + +
+ + + * Group "**power-mode**" +
+ Click here to expand this section. + + * `_maxn` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_MAXN*: `True` + - Workflow: + * `_maxq` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_MAXQ*: `True` + - Workflow: + +
+ + + * Group "**run-mode**" +
+ Click here to expand this section. + + * `_build` + - Environment variables: + - *MLPERF_NVIDIA_RUN_COMMAND*: `build` + - *CM_MLPERF_NVIDIA_HARNESS_RUN_MODE*: `build` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * get,generic,sys-util,_glog-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_gflags-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_libgmock-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_libre2-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_libnuma-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_libboost-all-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,generic,sys-util,_rapidjson-dev + - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) + * get,cuda,_cudnn + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,tensorrt + * CM names: `--adr.['tensorrt']...` + - CM script: [get-tensorrt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tensorrt) + * build,nvidia,inference,server + * CM names: `--adr.['nvidia-inference-server']...` + - CM script: [build-mlperf-inference-server-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/build-mlperf-inference-server-nvidia) + * `_build_engine` + - Aliases: 
`_build-engine` + - Environment variables: + - *MLPERF_NVIDIA_RUN_COMMAND*: `generate_engines` + - *CM_MLPERF_NVIDIA_HARNESS_RUN_MODE*: `generate_engines` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cuda,_cudnn + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,tensorrt + * CM names: `--adr.['tensorrt']...` + - CM script: [get-tensorrt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tensorrt) + * build,nvidia,inference,server + * CM names: `--adr.['nvidia-inference-server']...` + - CM script: [build-mlperf-inference-server-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/build-mlperf-inference-server-nvidia) + * reproduce,mlperf,inference,nvidia,harness,_preprocess_data + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_MODEL': ['dlrm-v2-99', 'dlrm-v2-99.9']}` + - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) + * reproduce,mlperf,inference,nvidia,harness,_download_model + * Skip this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet_old', 'resnet50', 'bert-99', 'bert-99.9', 'dlrm-v2-99', 'dlrm-v2-99.9']}` + - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) + * reproduce,mlperf,inference,nvidia,harness,_calibrate + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) + * `_calibrate` + - Environment variables: + - *MLPERF_NVIDIA_RUN_COMMAND*: `calibrate` + - *CM_MLPERF_NVIDIA_HARNESS_RUN_MODE*: `calibrate` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * reproduce,mlperf,inference,nvidia,harness,_download_model + * Skip this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet_old', 'resnet50', 'bert-99', 'bert-99.9', 'dlrm-v2-99', 'dlrm-v2-99.9']}` + - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) + * `_download_model` + - Environment variables: + - *MLPERF_NVIDIA_RUN_COMMAND*: `download_model` + - *CM_MLPERF_NVIDIA_HARNESS_RUN_MODE*: `download_model` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,generic-python-lib,_torch_cuda + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * `_prebuild` + - Environment variables: + - *MLPERF_NVIDIA_RUN_COMMAND*: `prebuild` + - *CM_MLPERF_NVIDIA_HARNESS_RUN_MODE*: `prebuild` + - Workflow: + * `_preprocess_data` + - Environment variables: + - *MLPERF_NVIDIA_RUN_COMMAND*: `preprocess_data` + - *CM_MLPERF_NVIDIA_HARNESS_RUN_MODE*: `preprocess_data` + - Workflow: + * **`_run_harness`** (default) + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_RUN_MODE*: `run_harness` + - *MLPERF_NVIDIA_RUN_COMMAND*: `run_harness` + - *CM_CALL_MLPERF_RUNNER*: `yes` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * get,cuda,_cudnn + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + * get,tensorrt + * CM names: `--adr.['tensorrt']...` + - CM script: [get-tensorrt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tensorrt) + * build,nvidia,inference,server + * CM names: `--adr.['nvidia-inference-server']...` + - CM script: [build-mlperf-inference-server-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/build-mlperf-inference-server-nvidia) + * reproduce,mlperf,inference,nvidia,harness,_build_engine + * CM names: `--adr.['build-engine']...` + - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) + * reproduce,mlperf,inference,nvidia,harness,_preprocess_data + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_MODEL': ['dlrm-v2-99', 'dlrm-v2-99.9']}` + - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) + * reproduce,mlperf,inference,nvidia,harness,_download_model + * Skip this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet', 'resnet50', 'bert-99', 'bert-99.9', 'dlrm-v2-99', 'dlrm-v2-99.9']}` + - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) + +
+ + + * Group "**triton**" +
+ Click here to expand this section. + + * `_use_triton` + - Environment variables: + - *CM_MLPERF_NVIDIA_HARNESS_USE_TRITON*: `yes` + - *CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX3*: `using_triton` + - Workflow: + +
+ + +#### Default variations + +`_cuda,_num-gpus.1,_resnet50,_run_harness,_tensorrt` + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--audio_buffer_num_lines=value` → `CM_MLPERF_NVIDIA_HARNESS_AUDIO_BUFFER_NUM_LINES=value` +* `--count=value` → `CM_MLPERF_LOADGEN_QUERY_COUNT=value` +* `--deque_timeout_usec=value` → `CM_MLPERF_NVIDIA_HARNESS_DEQUE_TIMEOUT_USEC=value` +* `--devices=value` → `CM_MLPERF_NVIDIA_HARNESS_DEVICES=value` +* `--dla_batch_size=value` → `CM_MLPERF_NVIDIA_HARNESS_DLA_BATCH_SIZE=value` +* `--dla_copy_streams=value` → `CM_MLPERF_NVIDIA_HARNESS_DLA_COPY_STREAMS=value` +* `--dla_inference_streams=value` → `CM_MLPERF_NVIDIA_HARNESS_DLA_INFERENCE_STREAMS=value` +* `--embedding_weights_on_gpu_part=value` → `CM_MLPERF_NVIDIA_HARNESS_EMBEDDING_WEIGHTS_ON_GPU_PART=value` +* `--enable_sort=value` → `CM_MLPERF_NVIDIA_HARNESS_ENABLE_SORT=value` +* `--end_on_device=value` → `CM_MLPERF_NVIDIA_HARNESS_END_ON_DEVICE=value` +* `--extra_run_options=value` → `CM_MLPERF_NVIDIA_HARNESS_EXTRA_RUN_OPTIONS=value` +* `--gpu_batch_size=value` → `CM_MLPERF_NVIDIA_HARNESS_GPU_BATCH_SIZE=value` +* `--gpu_copy_streams=value` → `CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS=value` +* `--gpu_inference_streams=value` → `CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS=value` +* `--graphs_max_seqlen=value` → `CM_MLPERF_NVIDIA_HARNESS_GRAPHS_MAX_SEQLEN=value` +* `--input_format=value` → `CM_MLPERF_NVIDIA_HARNESS_INPUT_FORMAT=value` +* `--log_dir=value` → `CM_MLPERF_NVIDIA_HARNESS_LOG_DIR=value` +* `--make_cmd=value` → `MLPERF_NVIDIA_RUN_COMMAND=value` +* `--max_batchsize=value` → `CM_MLPERF_LOADGEN_MAX_BATCHSIZE=value` +* `--max_dlas=value` → `CM_MLPERF_NVIDIA_HARNESS_MAX_DLAS=value` +* `--mlperf_conf=value` → `CM_MLPERF_CONF=value` +* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` +* `--multistream_target_latency=value` → `CM_MLPERF_LOADGEN_MULTISTREAM_TARGET_LATENCY=value` +* `--num_issue_query_threads=value` → `CM_MLPERF_NVIDIA_HARNESS_NUM_ISSUE_QUERY_THREADS=value` +* `--num_sort_segments=value` → `CM_MLPERF_NVIDIA_HARNESS_NUM_SORT_SEGMENTS=value` +* 
`--num_warmups=value` → `CM_MLPERF_NVIDIA_HARNESS_NUM_WARMUPS=value` +* `--offline_target_qps=value` → `CM_MLPERF_LOADGEN_OFFLINE_TARGET_QPS=value` +* `--output_dir=value` → `CM_MLPERF_OUTPUT_DIR=value` +* `--performance_sample_count=value` → `CM_MLPERF_LOADGEN_PERFORMANCE_SAMPLE_COUNT=value` +* `--power_setting=value` → `CM_MLPERF_NVIDIA_HARNESS_POWER_SETTING=value` +* `--rerun=value` → `CM_RERUN=value` +* `--run_infer_on_copy_streams=value` → `CM_MLPERF_NVIDIA_HARNESS_RUN_INFER_ON_COPY_STREAMS=value` +* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` +* `--server_target_qps=value` → `CM_MLPERF_LOADGEN_SERVER_TARGET_QPS=value` +* `--singlestream_target_latency=value` → `CM_MLPERF_LOADGEN_SINGLESTREAM_TARGET_LATENCY=value` +* `--skip_postprocess=value` → `CM_MLPERF_NVIDIA_HARNESS_SKIP_POSTPROCESS=value` +* `--skip_preprocess=value` → `CM_SKIP_PREPROCESS_DATASET=value` +* `--skip_preprocessing=value` → `CM_SKIP_PREPROCESS_DATASET=value` +* `--soft_drop=value` → `CM_MLPERF_NVIDIA_HARNESS_SOFT_DROP=value` +* `--start_from_device=value` → `CM_MLPERF_NVIDIA_HARNESS_START_FROM_DEVICE=value` +* `--target_latency=value` → `CM_MLPERF_LOADGEN_TARGET_LATENCY=value` +* `--target_qps=value` → `CM_MLPERF_LOADGEN_TARGET_QPS=value` +* `--use_cuda_thread_per_device=value` → `CM_MLPERF_NVIDIA_HARNESS_USE_CUDA_THREAD_PER_DEVICE=value` +* `--use_deque_limit=value` → `CM_MLPERF_NVIDIA_HARNESS_USE_DEQUE_LIMIT=value` +* `--use_fp8=value` → `CM_MLPERF_NVIDIA_HARNESS_USE_FP8=value` +* `--use_graphs=value` → `CM_MLPERF_NVIDIA_HARNESS_USE_GRAPHS=value` +* `--use_small_tile_gemm_plugin=value` → `CM_MLPERF_NVIDIA_HARNESS_USE_SMALL_TILE_GEMM_PLUGIN=value` +* `--use_triton=value` → `CM_MLPERF_NVIDIA_HARNESS_USE_TRITON=value` +* `--user_conf=value` → `CM_MLPERF_USER_CONF=value` +* `--workspace_size=value` → `CM_MLPERF_NVIDIA_HARNESS_WORKSPACE_SIZE=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... 
, "audio_buffer_num_lines":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_BATCH_COUNT: `1` +* CM_BATCH_SIZE: `1` +* CM_FAST_COMPILATION: `yes` +* CM_MLPERF_LOADGEN_SCENARIO: `Offline` +* CM_MLPERF_LOADGEN_MODE: `performance` +* CM_SKIP_PREPROCESS_DATASET: `no` +* CM_SKIP_MODEL_DOWNLOAD: `no` +* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `nvidia_original` +* CM_MLPERF_SKIP_RUN: `no` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-nvidia/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,mlperf,inference,nvidia,scratch,space + * CM names: `--adr.['nvidia-scratch-space']...` + - CM script: [get-mlperf-inference-nvidia-scratch-space](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-scratch-space) + * get,generic-python-lib,_mlperf_logging + * CM names: `--adr.['mlperf-logging']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,dataset,original,imagenet,_full + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + * CM names: `--adr.['imagenet-original']...` + - CM script: [get-dataset-imagenet-val](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-val) + * get,ml-model,resnet50,_fp32,_onnx,_opset-8 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['resnet50']}` + * CM names: `--adr.['resnet50-model', 'ml-model']...` + - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) + * get,dataset,original,kits19 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['3d-unet-99-disabled', '3d-unet-99.9-disabled']}` + * CM names: `--adr.['kits19-original']...` + - CM script: [get-dataset-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-kits19) + * get,dataset,original,librispeech + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['rnnt']}` + * CM names: `--adr.['librispeech-original']...` + - CM script: [get-dataset-librispeech](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-librispeech) + * get,dataset,preprocessed,criteo + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['dlrm-v2-99', 'dlrm-v2-99.9']}` + * Skip this dependency only if all ENV vars are set:
+`{'DLRM_DATA_PATH': [True]}` + * CM names: `--adr.['criteo-preprocessed']...` + - CM script: [get-preprocessed-dataset-criteo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-criteo) + * get,ml-model,dlrm,_pytorch + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['dlrm-v2-99', 'dlrm-v2-99.9']}` + * Skip this dependency only if all ENV vars are set:
+`{'DLRM_DATA_PATH': [True]}` + * CM names: `--adr.['dlrm-model']...` + - CM script: [get-ml-model-dlrm-terabyte](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-dlrm-terabyte) + * get,ml-model,bert,_onnx,_fp32 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['bert-99', 'bert-99.9']}` + * CM names: `--adr.['bert-model', 'bert-model-fp32']...` + - CM script: [get-ml-model-bert-large-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-large-squad) + * get,ml-model,bert,_onnx,_int8 + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['bert-99', 'bert-99.9']}` + * CM names: `--adr.['bert-model', 'bert-model-int8']...` + - CM script: [get-ml-model-bert-large-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-large-squad) + * get,squad-vocab + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['bert-99', 'bert-99.9']}` + * CM names: `--adr.['bert-vocab']...` + - CM script: [get-dataset-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad-vocab) + * get,dataset,original,openimages,_validation,_full,_custom-annotations + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + * CM names: `--adr.['openimages-original']...` + - CM script: [get-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages) + * get,dataset,original,openimages,_calibration + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['retinanet']}` + * CM names: `--adr.['openimages-calibration']...` + - CM script: [get-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages) + * get,dataset,original,openorca + * Enable this dependency only if all ENV vars are set:
+`{'CM_MODEL': ['gptj-99', 'gptj-99.9'], 'CM_MLPERF_NVIDIA_HARNESS_RUN_MODE': ['preprocess_dataset']}` + * CM names: `--adr.['openorca-original']...` + - CM script: [get-dataset-openorca](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openorca) + * get,mlcommons,inference,src + * CM names: `--adr.['inference-src']...` + - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) + * get,nvidia,mlperf,inference,common-code + * CM names: `--adr.['nvidia-inference-common-code']...` + - CM script: [get-mlperf-inference-nvidia-common-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-common-code) + * generate,user-conf,mlperf,inference + * Enable this dependency only if all ENV vars are set:
+`{'CM_MLPERF_NVIDIA_HARNESS_RUN_MODE': ['run_harness']}` + * CM names: `--adr.['user-conf-generator']...` + - CM script: [generate-mlperf-inference-user-conf](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-user-conf) + * get,generic-python-lib,_package.nvmitten,_path./opt/nvmitten-0.1.3-cp38-cp38-linux_x86_64.whl + * Enable this dependency only if all ENV vars are set:
+`{'CM_RUN_STATE_DOCKER': ['yes', True, 'True']}` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,nvidia,mitten + * Skip this dependency only if all ENV vars are set:
+`{'CM_RUN_STATE_DOCKER': ['yes', True, 'True']}` + - CM script: [get-nvidia-mitten](https://github.com/mlcommons/cm4mlops/tree/master/script/get-nvidia-mitten) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-nvidia/customize.py)*** + 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-nvidia/_cm.yaml)*** + * get,ml-model,gptj,_pytorch,_rclone + * Enable this dependency only if all ENV vars are set:
+`{'CM_REQUIRE_GPTJ_MODEL_DOWNLOAD': ['yes'], 'CM_MLPERF_NVIDIA_HARNESS_RUN_MODE': ['download_model', 'preprocess_data']}` + * CM names: `--adr.['gptj-model']...` + - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-nvidia/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-nvidia/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-nvidia/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-nvidia/_cm.yaml)*** + * benchmark-mlperf + * Enable this dependency only if all ENV vars are set:
+`{'CM_CALL_MLPERF_RUNNER': [True]}` + * Skip this dependency only if all ENV vars are set:
+`{'CM_MLPERF_SKIP_RUN': ['yes', True]}` + * CM names: `--adr.['runner', 'mlperf-runner']...` + - CM script: [benchmark-program-mlperf](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program-mlperf) + * save,mlperf,inference,state + * CM names: `--adr.['save-mlperf-inference-state']...` + - CM script: [save-mlperf-inference-implementation-state](https://github.com/mlcommons/cm4mlops/tree/master/script/save-mlperf-inference-implementation-state) + +___ +### Script output +`cmr "reproduce mlcommons mlperf inference harness nvidia-harness nvidia [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-octoml-tinyml-results.md b/docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-octoml-tinyml-results.md new file mode 100644 index 000000000..79baf27ea --- /dev/null +++ b/docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-octoml-tinyml-results.md @@ -0,0 +1,214 @@ +Automatically generated README for this automation recipe: **reproduce-mlperf-octoml-tinyml-results** + +Category: **Reproduce MLPerf benchmarks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=reproduce-mlperf-octoml-tinyml-results,a63803a707d04332) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-octoml-tinyml-results)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): 
*reproduce,tiny,results,mlperf,octoml,mlcommons* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "reproduce tiny results mlperf octoml mlcommons" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=reproduce,tiny,results,mlperf,octoml,mlcommons` + +`cm run script --tags=reproduce,tiny,results,mlperf,octoml,mlcommons[,variations] [--input_flags]` + +*or* + +`cmr "reproduce tiny results mlperf octoml mlcommons"` + +`cmr "reproduce tiny results mlperf octoml mlcommons [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'reproduce,tiny,results,mlperf,octoml,mlcommons' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="reproduce,tiny,results,mlperf,octoml,mlcommons"``` + +#### Run this script via Docker (beta) + +`cm docker script "reproduce tiny results mlperf octoml mlcommons[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_NRF` + - Environment variables: + - *CM_TINY_BOARD*: `NRF5340DK` + - Workflow: + * `_NUCLEO` + - Environment variables: + - *CM_TINY_BOARD*: `NUCLEO_L4R5ZI` + - Workflow: + * `_ad` + - Environment variables: + - *CM_TINY_MODEL*: `ad` + - Workflow: + * `_cmsis_nn` + - Environment variables: + - *CM_MICROTVM_VARIANT*: `microtvm_cmsis_nn` + - Workflow: + * `_ic` + - Environment variables: + - *CM_TINY_MODEL*: `ic` + - Workflow: + * `_kws` + - Environment variables: + - *CM_TINY_MODEL*: `kws` + - Workflow: + * `_native` + - Environment variables: + - *CM_MICROTVM_VARIANT*: `microtvm_native` + - Workflow: + * `_vww` + - Environment variables: + - *CM_TINY_MODEL*: `vww` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--flash=value` → `CM_FLASH_BOARD=value` +* `--recreate_binary=value` → `CM_RECREATE_BINARY=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "flash":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `r1.0` + +* `r1.0` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-octoml-tinyml-results/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,zephyr + * CM names: `--adr.['zephyr']...` + - CM script: [get-zephyr](https://github.com/mlcommons/cm4mlops/tree/master/script/get-zephyr) + * get,zephyr-sdk + * CM names: `--adr.['zephyr-sdk']...` + - CM script: [get-zephyr-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-zephyr-sdk) + * get,cmsis + * CM names: `--adr.['cmsis']...` + - CM script: [get-cmsis_5](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmsis_5) + * get,microtvm + * CM names: `--adr.['microtvm']...` + - CM script: [get-microtvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-microtvm) + * get,cmake + * CM names: `--adr.['cmake']...` + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * get,gcc + - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-octoml-tinyml-results/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-octoml-tinyml-results/_cm.json) + 1. 
***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-octoml-tinyml-results/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-octoml-tinyml-results/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-octoml-tinyml-results/customize.py)*** + 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-octoml-tinyml-results/_cm.json)*** + * flash,tiny,mlperf + * Enable this dependency only if all ENV vars are set:
+`{'CM_FLASH_BOARD': ['True']}` + - CM script: [flash-tinyml-binary](https://github.com/mlcommons/cm4mlops/tree/master/script/flash-tinyml-binary) + +___ +### Script output +`cmr "reproduce tiny results mlperf octoml mlcommons [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_TINY_*` +#### New environment keys auto-detected from customize + +* `CM_TINY_MODEL` \ No newline at end of file diff --git a/docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-training-nvidia.md b/docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-training-nvidia.md new file mode 100644 index 000000000..86f08d6b8 --- /dev/null +++ b/docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-training-nvidia.md @@ -0,0 +1,169 @@ +Automatically generated README for this automation recipe: **reproduce-mlperf-training-nvidia** + +Category: **Reproduce MLPerf benchmarks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=reproduce-mlperf-training-nvidia,f183628f292341e2) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *reproduce,mlcommons,mlperf,train,training,nvidia-training,nvidia* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "reproduce mlcommons mlperf train training nvidia-training nvidia" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=reproduce,mlcommons,mlperf,train,training,nvidia-training,nvidia` + +`cm run script --tags=reproduce,mlcommons,mlperf,train,training,nvidia-training,nvidia[,variations] [--input_flags]` + +*or* + +`cmr "reproduce mlcommons mlperf train training nvidia-training nvidia"` + +`cmr "reproduce mlcommons mlperf train training nvidia-training nvidia [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'reproduce,mlcommons,mlperf,train,training,nvidia-training,nvidia' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="reproduce,mlcommons,mlperf,train,training,nvidia-training,nvidia"``` + +#### Run this script via Docker (beta) + +`cm docker script "reproduce mlcommons mlperf train training nvidia-training nvidia[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**benchmark**" +
+ Click here to expand this section. + + * `_resnet` + - Environment variables: + - *CM_MLPERF_TRAINING_BENCHMARK*: `resnet` + - Workflow: + 1. ***Read "deps" on other CM scripts*** + * prepare,mlperf,training,resnet,_nvidia + * CM names: `--adr.['prepare-training-data', 'nvidia-training-data']...` + - CM script: [prepare-training-data-resnet](https://github.com/mlcommons/cm4mlops/tree/master/script/prepare-training-data-resnet) + * get,nvidia,training,code + * CM names: `--adr.['nvidia-training-code']...` + - CM script: [get-mlperf-training-nvidia-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-nvidia-code) + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--results_dir=value` → `CM_MLPERF_RESULTS_DIR=value` +* `--system_conf_name=value` → `CM_MLPERF_NVIDIA_TRAINING_SYSTEM_CONF_NAME=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "results_dir":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +* `r2.1` +* `r3.0` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * detect,cpu + - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) + * get,nvidia-docker + - CM script: [get-nvidia-docker](https://github.com/mlcommons/cm4mlops/tree/master/script/get-nvidia-docker) + * get,cuda + * CM names: `--adr.['cuda']...` + - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia/_cm.yaml) + 1. ***Run native script if exists*** + * [run-resnet.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia/run-resnet.sh) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia/_cm.yaml) + +___ +### Script output +`cmr "reproduce mlcommons mlperf train training nvidia-training nvidia [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Reproduce-MLPerf-benchmarks/wrapper-reproduce-octoml-tinyml-submission.md b/docs/Reproduce-MLPerf-benchmarks/wrapper-reproduce-octoml-tinyml-submission.md new file mode 100644 index 000000000..4854b325c --- /dev/null +++ b/docs/Reproduce-MLPerf-benchmarks/wrapper-reproduce-octoml-tinyml-submission.md @@ -0,0 +1,140 @@ +Automatically generated README for this automation recipe: **wrapper-reproduce-octoml-tinyml-submission** + +Category: **Reproduce MLPerf benchmarks** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=wrapper-reproduce-octoml-tinyml-submission,b946001e289c4480) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/wrapper-reproduce-octoml-tinyml-submission)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *run,generate-tiny,generate,submission,tiny,generate-tiny-submission,results,mlcommons,mlperf,octoml* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run generate-tiny generate submission tiny generate-tiny-submission results mlcommons mlperf octoml" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run,generate-tiny,generate,submission,tiny,generate-tiny-submission,results,mlcommons,mlperf,octoml` + +`cm run script --tags=run,generate-tiny,generate,submission,tiny,generate-tiny-submission,results,mlcommons,mlperf,octoml [--input_flags]` + +*or* + +`cmr "run generate-tiny generate submission tiny generate-tiny-submission results mlcommons mlperf octoml"` + +`cmr "run generate-tiny generate submission tiny generate-tiny-submission results mlcommons mlperf octoml " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'run,generate-tiny,generate,submission,tiny,generate-tiny-submission,results,mlcommons,mlperf,octoml', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,generate-tiny,generate,submission,tiny,generate-tiny-submission,results,mlcommons,mlperf,octoml"``` + +#### Run this script via Docker (beta) + +`cm docker script "run generate-tiny generate submission tiny generate-tiny-submission results mlcommons mlperf octoml" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--flash=value` → `CM_FLASH_BOARD=value` +* `--recreate_binary=value` → `CM_RECREATE_BINARY=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "flash":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `r1.0` + +* `r1.0` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/wrapper-reproduce-octoml-tinyml-submission/_cm.json)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/wrapper-reproduce-octoml-tinyml-submission/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/wrapper-reproduce-octoml-tinyml-submission/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/wrapper-reproduce-octoml-tinyml-submission/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/wrapper-reproduce-octoml-tinyml-submission/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/wrapper-reproduce-octoml-tinyml-submission/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/wrapper-reproduce-octoml-tinyml-submission/_cm.json) + +___ +### Script output +`cmr "run generate-tiny generate submission tiny generate-tiny-submission results mlcommons mlperf octoml " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Reproducibility-and-artifact-evaluation/get-ipol-src.md b/docs/Reproducibility-and-artifact-evaluation/get-ipol-src.md new file mode 100644 index 000000000..755607bfb --- /dev/null +++ b/docs/Reproducibility-and-artifact-evaluation/get-ipol-src.md @@ -0,0 +1,146 @@ +Automatically generated README for this automation recipe: **get-ipol-src** + +Category: **Reproducibility and artifact evaluation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ipol-src,b6fd8213d03d4aa4) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ipol-src)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ipol,journal,src,ipol-src* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get ipol journal src ipol-src" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,ipol,journal,src,ipol-src` + +`cm run script --tags=get,ipol,journal,src,ipol-src [--input_flags]` + +*or* + +`cmr "get ipol journal src ipol-src"` + +`cmr "get ipol journal src ipol-src " [--input_flags]` + + + +#### Input Flags + +* --**number**=IPOL publication number +* --**year**=IPOL publication year + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "number":...} +``` +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,ipol,journal,src,ipol-src', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ipol,journal,src,ipol-src"``` + +#### Run this script via Docker (beta) + +`cm docker script "get ipol journal src ipol-src" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--number=value` → `CM_IPOL_NUMBER=value` +* `--year=value` → `CM_IPOL_YEAR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "number":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ipol-src/_cm.json) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ipol-src/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ipol-src/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ipol-src/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ipol-src/_cm.json) + +___ +### Script output +`cmr "get ipol journal src ipol-src " [--input_flags] -j` +#### New environment keys (filter) + +* `CM_IPOL_*` +#### New environment keys auto-detected from customize + +* `CM_IPOL_PATH` \ No newline at end of file diff --git a/docs/Reproducibility-and-artifact-evaluation/process-ae-users.md b/docs/Reproducibility-and-artifact-evaluation/process-ae-users.md new file mode 100644 index 000000000..38c4316ad --- /dev/null +++ b/docs/Reproducibility-and-artifact-evaluation/process-ae-users.md @@ -0,0 +1,136 @@ +Automatically generated README for this automation recipe: **process-ae-users** + +Category: **Reproducibility and artifact evaluation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=process-ae-users,5800f1ed677e4efb) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-ae-users)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *process,ae,users* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "process ae users" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=process,ae,users` + +`cm run script --tags=process,ae,users [--input_flags]` + +*or* + +`cmr "process ae users"` + +`cmr "process ae users " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'process,ae,users', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="process,ae,users"``` + +#### Run this script via Docker (beta) + +`cm docker script "process ae users" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--file=value` → `CM_PROCESS_AE_USERS_INPUT_FILE=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "file":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-ae-users/_cm.json)*** + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-ae-users/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-ae-users/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-ae-users/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-ae-users/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-ae-users/_cm.json) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-ae-users/_cm.json) + +___ +### Script output +`cmr "process ae users " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Reproducibility-and-artifact-evaluation/reproduce-ipol-paper-2022-439.md b/docs/Reproducibility-and-artifact-evaluation/reproduce-ipol-paper-2022-439.md new file mode 100644 index 000000000..5212a87b0 --- /dev/null +++ b/docs/Reproducibility-and-artifact-evaluation/reproduce-ipol-paper-2022-439.md @@ -0,0 +1,148 @@ +Automatically generated README for this automation recipe: **reproduce-ipol-paper-2022-439** + +Category: **Reproducibility and artifact evaluation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=reproduce-ipol-paper-2022-439,f9b9e5bd65e34e4f) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *app,python,reproduce,project,paper,ipol,journal,repro,reproducibility,pytorch,2022-439* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "app python reproduce project paper ipol journal repro reproducibility pytorch 2022-439" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=app,python,reproduce,project,paper,ipol,journal,repro,reproducibility,pytorch,2022-439` + +`cm run script --tags=app,python,reproduce,project,paper,ipol,journal,repro,reproducibility,pytorch,2022-439 [--input_flags]` + +*or* + +`cmr "app python reproduce project paper ipol journal repro reproducibility pytorch 2022-439"` + +`cmr "app python reproduce project paper ipol journal repro reproducibility pytorch 2022-439 " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'app,python,reproduce,project,paper,ipol,journal,repro,reproducibility,pytorch,2022-439', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="app,python,reproduce,project,paper,ipol,journal,repro,reproducibility,pytorch,2022-439"``` + +#### Run this script via Docker (beta) + +`cm docker script "app python reproduce project paper ipol journal repro reproducibility pytorch 2022-439" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--image1=value` → `CM_IMAGE_1=value` +* `--image2=value` → `CM_IMAGE_2=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "image1":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,ipol,src + * CM names: `--adr.['ipol-src']...` + - CM script: [get-ipol-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ipol-src) + * get,generic-python-lib,_torch + * CM names: `--adr.['torch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + * get,generic-python-lib,_torchvision + * CM names: `--adr.['torchvision']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439/_cm.yaml) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439/_cm.yaml) + +___ +### Script output +`cmr "app python reproduce project paper ipol journal repro reproducibility pytorch 2022-439 " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Reproducibility-and-artifact-evaluation/reproduce-micro-paper-2023-victima.md b/docs/Reproducibility-and-artifact-evaluation/reproduce-micro-paper-2023-victima.md new file mode 100644 index 000000000..41b899e1e --- /dev/null +++ b/docs/Reproducibility-and-artifact-evaluation/reproduce-micro-paper-2023-victima.md @@ -0,0 +1,179 @@ +
+Click here to see the table of contents. + +* [About](#about) +* [Summary](#summary) +* [Reuse this script in your project](#reuse-this-script-in-your-project) + * [ Install CM automation language](#install-cm-automation-language) + * [ Check CM script flags](#check-cm-script-flags) + * [ Run this script from command line](#run-this-script-from-command-line) + * [ Run this script from Python](#run-this-script-from-python) + * [ Run this script via GUI](#run-this-script-via-gui) + * [ Run this script via Docker (beta)](#run-this-script-via-docker-(beta)) +* [Customization](#customization) + * [ Variations](#variations) + * [ Script flags mapped to environment](#script-flags-mapped-to-environment) + * [ Default environment](#default-environment) +* [Script workflow, dependencies and native scripts](#script-workflow-dependencies-and-native-scripts) +* [Script output](#script-output) +* [New environment keys (filter)](#new-environment-keys-(filter)) +* [New environment keys auto-detected from customize](#new-environment-keys-auto-detected-from-customize) +* [Maintainers](#maintainers) + +
+ +*Note that this README is automatically generated - don't edit!* + +### About + + +See extra [notes](README-extra.md) from the authors and contributors. + +#### Summary + +* Category: *Reproducibility and artifact evaluation.* +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/main/script/reproduce-micro-paper-2023-victima)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* CM "database" tags to find this script: *reproduce,project,paper,micro,micro-2023,victima* +* Output cached? *False* +___ +### Reuse this script in your project + +#### Install CM automation language + +* [Installation guide](https://github.com/mlcommons/ck/blob/master/docs/installation.md) +* [CM intro](https://doi.org/10.5281/zenodo.8105339) + +#### Pull CM repository with this automation + +```cm pull repo mlcommons@cm4mlops --checkout=dev``` + + +#### Run this script from command line + +1. `cm run script --tags=reproduce,project,paper,micro,micro-2023,victima[,variations] [--input_flags]` + +2. `cmr "reproduce project paper micro micro-2023 victima[ variations]" [--input_flags]` + +* `variations` can be seen [here](#variations) + +* `input_flags` can be seen [here](#script-flags-mapped-to-environment) + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'reproduce,project,paper,micro,micro-2023,victima', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="reproduce,project,paper,micro,micro-2023,victima"``` + +Use this [online GUI](https://cKnowledge.org/cm-gui/?tags=reproduce,project,paper,micro,micro-2023,victima) to generate CM CMD. + +#### Run this script via Docker (beta) + +`cm docker script "reproduce project paper micro micro-2023 victima[ variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_install_deps` + - Workflow: + * `_plot` + - Workflow: + * `_run` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--container=value` → `CM_VICTIMA_CONTAINER=value` +* `--job_manager=value` → `CM_VICTIMA_JOB_MANAGER=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "container":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_VICTIMA_JOB_MANAGER: `native` +* CM_VICTIMA_CONTAINER: `docker` + +
+ +___ +### Script workflow, dependencies and native scripts + +
+Click here to expand this section. + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/main/script/reproduce-micro-paper-2023-victima/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/main/script/detect-os) + * get,python + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/main/script/get-python3) + * get,git,repo,_repo.https://github.com/CMU-SAFARI/Victima + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/main/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/main/script/reproduce-micro-paper-2023-victima/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/main/script/reproduce-micro-paper-2023-victima/_cm.yaml) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/reproduce-micro-paper-2023-victima/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/main/script/reproduce-micro-paper-2023-victima/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/main/script/reproduce-micro-paper-2023-victima/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/main/script/reproduce-micro-paper-2023-victima/_cm.yaml) +
+ +___ +### Script output +`cmr "reproduce project paper micro micro-2023 victima[,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize + +___ +### Maintainers + +* [Open MLCommons taskforce on automation and reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) \ No newline at end of file diff --git a/docs/Tests/print-croissant-desc.md b/docs/Tests/print-croissant-desc.md new file mode 100644 index 000000000..fafb36774 --- /dev/null +++ b/docs/Tests/print-croissant-desc.md @@ -0,0 +1,144 @@ +Automatically generated README for this automation recipe: **print-croissant-desc** + +Category: **Tests** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=print-croissant-desc,59116d5c98a04d4f) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-croissant-desc)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *print,croissant,desc* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "print croissant desc" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=print,croissant,desc` + +`cm run script --tags=print,croissant,desc [--input_flags]` + +*or* + +`cmr "print croissant desc"` + +`cmr "print croissant desc " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'print,croissant,desc', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="print,croissant,desc"``` + +#### Run this script via Docker (beta) + +`cm docker script "print croissant desc" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--url=value` → `CM_PRINT_CROISSANT_URL=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "url":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_PRINT_CROISSANT_URL: `https://raw.githubusercontent.com/mlcommons/croissant/main/datasets/1.0/gpt-3/metadata.json` + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-croissant-desc/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,croissant + * CM names: `--adr.['croissant']...` + - CM script: [get-croissant](https://github.com/mlcommons/cm4mlops/tree/master/script/get-croissant) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-croissant-desc/_cm.yaml) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-croissant-desc/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-croissant-desc/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-croissant-desc/_cm.yaml) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-croissant-desc/_cm.yaml) + +___ +### Script output +`cmr "print croissant desc " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Tests/print-hello-world-java.md b/docs/Tests/print-hello-world-java.md new file mode 100644 index 000000000..2b51ce3f0 --- /dev/null +++ b/docs/Tests/print-hello-world-java.md @@ -0,0 +1,123 @@ +Automatically generated README for this automation recipe: **print-hello-world-java** + +Category: **Tests** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=print-hello-world-java,3b62dc46cce3489c) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-java)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *print,hello world,hello-world,hello,world,java* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "print hello world hello-world hello world java" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=print,hello world,hello-world,hello,world,java` + +`cm run script --tags=print,hello world,hello-world,hello,world,java ` + +*or* + +`cmr "print hello world hello-world hello world java"` + +`cmr "print hello world hello-world hello world java " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'print,hello world,hello-world,hello,world,java' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="print,hello world,hello-world,hello,world,java"``` + +#### Run this script via Docker (beta) + +`cm docker script "print hello world hello-world hello world java" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-java/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,java + * CM names: `--adr.['java']...` + - CM script: [get-java](https://github.com/mlcommons/cm4mlops/tree/master/script/get-java) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-java/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-java/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-java/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-java/_cm.json) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-java/_cm.json) + +___ +### Script output +`cmr "print hello world hello-world hello world java " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Tests/print-hello-world-javac.md b/docs/Tests/print-hello-world-javac.md new file mode 100644 index 000000000..3e1db5b11 --- /dev/null +++ b/docs/Tests/print-hello-world-javac.md @@ -0,0 +1,123 @@ +Automatically generated README for this automation recipe: **print-hello-world-javac** + +Category: **Tests** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=print-hello-world-javac,040fafd538104819) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-javac)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *print,hello world,hello-world,hello,world,javac* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "print hello world hello-world hello world javac" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=print,hello world,hello-world,hello,world,javac` + +`cm run script --tags=print,hello world,hello-world,hello,world,javac ` + +*or* + +`cmr "print hello world hello-world hello world javac"` + +`cmr "print hello world hello-world hello world javac " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'print,hello world,hello-world,hello,world,javac' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="print,hello world,hello-world,hello,world,javac"``` + +#### Run this script via Docker (beta) + +`cm docker script "print hello world hello-world hello world javac" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-javac/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,javac + * CM names: `--adr.['javac']...` + - CM script: [get-javac](https://github.com/mlcommons/cm4mlops/tree/master/script/get-javac) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-javac/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-javac/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-javac/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-javac/_cm.json) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-javac/_cm.json) + +___ +### Script output +`cmr "print hello world hello-world hello world javac " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Tests/print-hello-world-py.md b/docs/Tests/print-hello-world-py.md new file mode 100644 index 000000000..ddfa31d1d --- /dev/null +++ b/docs/Tests/print-hello-world-py.md @@ -0,0 +1,129 @@ +Automatically generated README for this automation recipe: **print-hello-world-py** + +Category: **Tests** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=print-hello-world-py,d83274c7eb754d90) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-py)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *print,hello world,hello-world,hello,world,python* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "print hello world hello-world hello world python" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=print,hello world,hello-world,hello,world,python` + +`cm run script --tags=print,hello world,hello-world,hello,world,python ` + +*or* + +`cmr "print hello world hello-world hello world python"` + +`cmr "print hello world hello-world hello world python " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'print,hello world,hello-world,hello,world,python' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="print,hello world,hello-world,hello,world,python"``` + +#### Run this script via Docker (beta) + +`cm docker script "print hello world hello-world hello world python" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-py/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,sys-utils-cm + - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * print,python-version + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_SKIP_PRINT': ['True'], 'CM_SKIP_PRINT2': ['True']}` + - CM script: [print-python-version](https://github.com/mlcommons/cm4mlops/tree/master/script/print-python-version) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-py/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-py/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-py/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-py/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-py/_cm.json) + +___ +### Script output +`cmr "print hello world hello-world hello world python " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Tests/print-hello-world.md b/docs/Tests/print-hello-world.md new file mode 100644 index 000000000..1505464e7 --- /dev/null +++ b/docs/Tests/print-hello-world.md @@ -0,0 +1,155 @@ +Automatically generated README for this automation recipe: **print-hello-world** + +Category: **Tests** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=print-hello-world,b9f0acba4aca4baa) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world)* +* CM meta 
description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *print,hello-world,hello world,hello,world,native-script,native,script* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "print hello-world hello world hello world native-script native script" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=print,hello-world,hello world,hello,world,native-script,native,script` + +`cm run script --tags=print,hello-world,hello world,hello,world,native-script,native,script[,variations] [--input_flags]` + +*or* + +`cmr "print hello-world hello world hello world native-script native script"` + +`cmr "print hello-world hello world hello world native-script native script [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'print,hello-world,hello world,hello,world,native-script,native,script' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="print,hello-world,hello world,hello,world,native-script,native,script"``` + +#### Run this script via Docker (beta) + +`cm docker script "print hello-world hello world hello world native-script native script[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_skip_print_env` + - Environment variables: + - *CM_PRINT_HELLO_WORLD_SKIP_PRINT_ENV*: `yes` + - Workflow: + * `_text.#` + - Environment variables: + - *CM_PRINT_HELLO_WORLD_TEXT*: `#` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--test1=value` → `CM_ENV_TEST1=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "test1":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_ENV_TEST1: `TEST1` + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world/_cm.yaml) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world/_cm.yaml) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world/_cm.yaml) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world/_cm.yaml) + +___ +### Script output +`cmr "print hello-world hello world hello world native-script native script [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_ENV_TEST*` +#### New environment keys auto-detected from customize diff --git a/docs/Tests/print-python-version.md b/docs/Tests/print-python-version.md new file mode 100644 index 000000000..09db44012 --- /dev/null +++ b/docs/Tests/print-python-version.md @@ -0,0 +1,121 @@ +Automatically generated README for this automation recipe: **print-python-version** + +Category: **Tests** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=print-python-version,d3a538fa4abb464b) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-python-version)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *print,python,version,python-version* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "print python version python-version" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=print,python,version,python-version` + +`cm run script --tags=print,python,version,python-version ` + +*or* + +`cmr "print python version python-version"` + +`cmr "print python version python-version " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'print,python,version,python-version' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="print,python,version,python-version"``` + +#### Run this script via Docker (beta) + +`cm docker script "print python version python-version" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-python-version/_cm.json)*** + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-python-version/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-python-version/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-python-version/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-python-version/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-python-version/_cm.json) + +___ +### Script output +`cmr "print python version python-version " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Tests/run-python.md b/docs/Tests/run-python.md new file mode 100644 index 000000000..7ab1b2aec --- /dev/null +++ b/docs/Tests/run-python.md @@ -0,0 +1,138 @@ +Automatically generated README for this automation recipe: **run-python** + +Category: **Tests** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-python,75a46d84ee6f49b0) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub 
directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-python)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *run,python* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "run python" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=run,python` + +`cm run script --tags=run,python [--input_flags]` + +*or* + +`cmr "run python"` + +`cmr "run python " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run' + 'automation':'script', + 'tags':'run,python' + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="run,python"``` + +#### Run this script via Docker (beta) + +`cm docker script "run python" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--command=value` → `CM_RUN_PYTHON_CMD=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "command":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-python/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-python/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-python/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-python/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-python/_cm.json) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-python/_cm.json) + +___ +### Script output +`cmr "run python " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Tests/test-deps-conditions.md b/docs/Tests/test-deps-conditions.md new file mode 100644 index 000000000..4c0ee33ed --- /dev/null +++ b/docs/Tests/test-deps-conditions.md @@ -0,0 +1,151 @@ +Automatically generated README for this automation recipe: **test-deps-conditions** + +Category: **Tests** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=test-deps-conditions,5cb82aee472640df) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-deps-conditions)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *test,deps,conditions* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "test deps conditions" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=test,deps,conditions` + +`cm run script --tags=test,deps,conditions [--input_flags]` + +*or* + +`cmr "test deps conditions"` + +`cmr "test deps conditions " [--input_flags]` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'test,deps,conditions', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="test,deps,conditions"``` + +#### Run this script via Docker (beta) + +`cm docker script "test deps conditions" [--input_flags]` + +___ +### Customization + + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--test1=value` → `CM_ENV1=value` +* `--test2=value` → `CM_ENV2=value` +* `--test3=value` → `CM_ENV3=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "test1":...}) +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-deps-conditions/_cm.yaml)*** + * print,native,hello-world,_skip_print_env + - CM script: [print-hello-world](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world) + * print,native,hello-world,_skip_print_env,_text.SKIP_IF_ALL_ENV + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_ENV1': [True], 'CM_ENV2': [True], 'CM_ENV3': [True]}` + - CM script: [print-hello-world](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world) + * print,native,hello-world,_skip_print_env,_text.SKIP_IF_ANY_ENV + * Skip this dependency only if any of ENV vars are set:
+`{'CM_ENV1': [True], 'CM_ENV2': [True], 'CM_ENV3': [True]}` + - CM script: [print-hello-world](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world) + * print,native,hello-world,_skip_print_env,_text.ENABLE_IF_ALL_ENV + * Enable this dependency only if all ENV vars are set:
+`{'CM_ENV1': [True], 'CM_ENV2': [True], 'CM_ENV3': [True]}` + - CM script: [print-hello-world](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world) + * print,native,hello-world,_skip_print_env,_text.ENABLE_IF_ANY_ENV + * Enable this dependency only if any of ENV vars are set:
+`{'CM_ENV1': [True], 'CM_ENV2': [True], 'CM_ENV3': [True]}` + - CM script: [print-hello-world](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-deps-conditions/_cm.yaml) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-deps-conditions/_cm.yaml) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-deps-conditions/_cm.yaml) + +___ +### Script output +`cmr "test deps conditions " [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Tests/test-download-and-extract-artifacts.md b/docs/Tests/test-download-and-extract-artifacts.md new file mode 100644 index 000000000..c0b6cf1de --- /dev/null +++ b/docs/Tests/test-download-and-extract-artifacts.md @@ -0,0 +1,123 @@ +Automatically generated README for this automation recipe: **test-download-and-extract-artifacts** + +Category: **Tests** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=test-download-and-extract-artifacts,51dde7580b404b27) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts)* +* CM meta description for this script: 
*[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *test,download-and-extract-artifacts* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "test download-and-extract-artifacts" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=test,download-and-extract-artifacts` + +`cm run script --tags=test,download-and-extract-artifacts ` + +*or* + +`cmr "test download-and-extract-artifacts"` + +`cmr "test download-and-extract-artifacts " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'test,download-and-extract-artifacts', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="test,download-and-extract-artifacts"``` + +#### Run this script via Docker (beta) + +`cm docker script "test download-and-extract-artifacts" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts/_cm.yaml)*** + * download,file,_url.https://zenodo.org/record/4735647/files/resnet50_v1.onnx + - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) + * download-and-extract,_extract,_url.https://zenodo.org/record/5597155/files/3dunet_kits19_128x128x128.tf.zip?download=1 + - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts/_cm.yaml) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts/_cm.yaml) + +___ +### Script output +`cmr "test download-and-extract-artifacts " -j` +#### New environment keys (filter) + +* `CM_REPRODUCE_PAPER_XYZ*` +#### New environment keys auto-detected from customize diff --git a/docs/Tests/test-set-sys-user-cm.md b/docs/Tests/test-set-sys-user-cm.md new file mode 100644 index 000000000..5edef2acb --- /dev/null +++ b/docs/Tests/test-set-sys-user-cm.md @@ -0,0 +1,118 @@ +Automatically generated README for this automation recipe: **test-set-sys-user-cm** + +Category: **Tests** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=test-set-sys-user-cm,25fdfcf0fe434af2) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-set-sys-user-cm)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *demo,set,sys-user,cm,sys-user-cm* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "demo set sys-user cm sys-user-cm" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=demo,set,sys-user,cm,sys-user-cm` + +`cm run script --tags=demo,set,sys-user,cm,sys-user-cm ` + +*or* + +`cmr "demo set sys-user cm sys-user-cm"` + +`cmr "demo set sys-user cm sys-user-cm " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'demo,set,sys-user,cm,sys-user-cm', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="demo,set,sys-user,cm,sys-user-cm"``` + +#### Run this script via Docker (beta) + +`cm docker script "demo set sys-user cm sys-user-cm" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_SUDO: `sudo` + +
+ +___ +### Dependencies on other CM scripts + + + 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-set-sys-user-cm/_cm.json) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-set-sys-user-cm/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-set-sys-user-cm/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-set-sys-user-cm/_cm.json) + 1. Run "postrocess" function from customize.py + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-set-sys-user-cm/_cm.json) + +___ +### Script output +`cmr "demo set sys-user cm sys-user-cm " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/Tests/upgrade-python-pip.md b/docs/Tests/upgrade-python-pip.md new file mode 100644 index 000000000..cacd17ff8 --- /dev/null +++ b/docs/Tests/upgrade-python-pip.md @@ -0,0 +1,123 @@ +Automatically generated README for this automation recipe: **upgrade-python-pip** + +Category: **Tests** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=upgrade-python-pip,4343ed2d9a974923) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/upgrade-python-pip)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in 
above meta description): *upgrade,python,pip,python-pip* +* Output cached? *False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "upgrade python pip python-pip" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=upgrade,python,pip,python-pip` + +`cm run script --tags=upgrade,python,pip,python-pip ` + +*or* + +`cmr "upgrade python pip python-pip"` + +`cmr "upgrade python pip python-pip " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'upgrade,python,pip,python-pip', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="upgrade,python,pip,python-pip"``` + +#### Run this script via Docker (beta) + +`cm docker script "upgrade python pip python-pip" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/upgrade-python-pip/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + 1. Run "preprocess" function from customize.py + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/upgrade-python-pip/_cm.json) + 1. ***Run native script if exists*** + * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/upgrade-python-pip/run.bat) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/upgrade-python-pip/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/upgrade-python-pip/_cm.json) + 1. Run "postrocess" function from customize.py + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/upgrade-python-pip/_cm.json) + +___ +### Script output +`cmr "upgrade python pip python-pip " -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/TinyML-automation/create-fpgaconvnet-app-tinyml.md b/docs/TinyML-automation/create-fpgaconvnet-app-tinyml.md new file mode 100644 index 000000000..c1644ada5 --- /dev/null +++ b/docs/TinyML-automation/create-fpgaconvnet-app-tinyml.md @@ -0,0 +1,156 @@ +Automatically generated README for this automation recipe: **create-fpgaconvnet-app-tinyml** + +Category: **TinyML automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=create-fpgaconvnet-app-tinyml,618f3520e98e4728) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-app-tinyml)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *create,app,fpgaconvnet* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "create app fpgaconvnet" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=create,app,fpgaconvnet` + +`cm run script --tags=create,app,fpgaconvnet[,variations] ` + +*or* + +`cmr "create app fpgaconvnet"` + +`cmr "create app fpgaconvnet [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'create,app,fpgaconvnet', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="create,app,fpgaconvnet"``` + +#### Run this script via Docker (beta) + +`cm docker script "create app fpgaconvnet[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**benchmark**" +
+ Click here to expand this section. + + * **`_ic`** (default) + - Workflow: + +
+ + + * Group "**board**" +
+ Click here to expand this section. + + * **`_zc706`** (default) + - Environment variables: + - *CM_TINY_BOARD*: `zc706` + - Workflow: + +
+ + +#### Default variations + +`_ic,_zc706` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-app-tinyml/_cm.json)*** + * create,fpgaconvnet,config + * CM names: `--adr.['config-generator']...` + - CM script: [create-fpgaconvnet-config-tinyml](https://github.com/mlcommons/cm4mlops/tree/master/script/create-fpgaconvnet-config-tinyml) + * get,xilinx,sdk + * CM names: `--adr.['xilinx-sdk']...` + - CM script: [get-xilinx-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-xilinx-sdk) + * get,tensorflow + * CM names: `--adr.['tensorflow']...` + - CM script: [install-tensorflow-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-tensorflow-from-src) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-app-tinyml/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-app-tinyml/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-app-tinyml/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-app-tinyml/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-app-tinyml/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-app-tinyml/_cm.json) + +___ +### Script output +`cmr "create app fpgaconvnet [,variations]" -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/TinyML-automation/create-fpgaconvnet-config-tinyml.md b/docs/TinyML-automation/create-fpgaconvnet-config-tinyml.md new file mode 100644 index 000000000..4f6b5eb62 --- /dev/null +++ b/docs/TinyML-automation/create-fpgaconvnet-config-tinyml.md @@ -0,0 +1,173 @@ +Automatically generated README for this automation recipe: **create-fpgaconvnet-config-tinyml** + +Category: **TinyML automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=create-fpgaconvnet-config-tinyml,f6cdad166cfa47bc) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-config-tinyml)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *create,config,fpgaconvnet* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "create config fpgaconvnet" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=create,config,fpgaconvnet` + +`cm run script --tags=create,config,fpgaconvnet[,variations] ` + +*or* + +`cmr "create config fpgaconvnet"` + +`cmr "create config fpgaconvnet [variations]" ` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'create,config,fpgaconvnet', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="create,config,fpgaconvnet"``` + +#### Run this script via Docker (beta) + +`cm docker script "create config fpgaconvnet[variations]" ` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_zc706,ic` + - Environment variables: + - *CM_TINY_NETWORK_NAME*: `zc706-resnet` + - Workflow: + +
+ + + * Group "**benchmark**" +
+ Click here to expand this section. + + * **`_ic`** (default) + - Workflow: + +
+ + + * Group "**board**" +
+ Click here to expand this section. + + * **`_zc706`** (default) + - Environment variables: + - *CM_TINY_BOARD*: `zc706` + - Workflow: + +
+ + +#### Default variations + +`_ic,_zc706` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-config-tinyml/_cm.json)*** + * get,python3 + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,ml-model,tiny + * CM names: `--adr.['ml-model']...` + - CM script: [get-ml-model-tiny-resnet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-tiny-resnet) + * get,git,repo,_repo.https://github.com/mlcommons/submissions_tiny_v1.1 + - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-config-tinyml/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-config-tinyml/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-config-tinyml/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-config-tinyml/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-config-tinyml/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-config-tinyml/_cm.json) + +___ +### Script output +`cmr "create config fpgaconvnet [,variations]" -j` +#### New environment keys (filter) + +* `CM_TINY_FPGACONVNET*` +#### New environment keys auto-detected from customize + +* `CM_TINY_FPGACONVNET_' + network_env_name + '_CODE_PATH` +* `CM_TINY_FPGACONVNET_' + network_env_name + '_RUN_DIR` +* `CM_TINY_FPGACONVNET_CONFIG_FILE_' + network_env_name + '_PATH` +* `CM_TINY_FPGACONVNET_NETWORK_ENV_NAME` +* `CM_TINY_FPGACONVNET_NETWORK_NAME` \ No newline at end of file diff --git a/docs/TinyML-automation/flash-tinyml-binary.md b/docs/TinyML-automation/flash-tinyml-binary.md new file mode 100644 index 000000000..8d41da6b9 --- /dev/null +++ b/docs/TinyML-automation/flash-tinyml-binary.md @@ -0,0 +1,175 @@ +Automatically generated README for this automation recipe: **flash-tinyml-binary** + +Category: **TinyML automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=flash-tinyml-binary,98913babb43f4fcb) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/flash-tinyml-binary)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *flash,tiny,mlperf,mlcommons* +* Output cached? 
*False* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "flash tiny mlperf mlcommons" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=flash,tiny,mlperf,mlcommons` + +`cm run script --tags=flash,tiny,mlperf,mlcommons[,variations] [--input_flags]` + +*or* + +`cmr "flash tiny mlperf mlcommons"` + +`cmr "flash tiny mlperf mlcommons [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'flash,tiny,mlperf,mlcommons', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="flash,tiny,mlperf,mlcommons"``` + +#### Run this script via Docker (beta) + +`cm docker script "flash tiny mlperf mlcommons[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_NRF` + - Workflow: + * `_NUCLEO` + - Workflow: + * `_ad` + - Workflow: + * `_cmsis_nn` + - Workflow: + * `_ic` + - Workflow: + * `_kws` + - Workflow: + * `_native` + - Workflow: + * `_vww` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--build_dir=value` → `CM_TINY_BUILD_DIR=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "build_dir":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +#### Versions +Default version: `r1.0` + +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/flash-tinyml-binary/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + * get,zephyr + * CM names: `--adr.['zephyr']...` + - CM script: [get-zephyr](https://github.com/mlcommons/cm4mlops/tree/master/script/get-zephyr) + * get,zephyr-sdk + * CM names: `--adr.['zephyr-sdk']...` + - CM script: [get-zephyr-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-zephyr-sdk) + * reproduce,tiny,mlperf + * Skip this dependenecy only if all ENV vars are set:
+`{'CM_TINY_BUILD_DIR': ['on']}` + - CM script: [reproduce-mlperf-octoml-tinyml-results](https://github.com/mlcommons/cm4mlops/tree/master/script/reproduce-mlperf-octoml-tinyml-results) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/flash-tinyml-binary/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/flash-tinyml-binary/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/flash-tinyml-binary/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/flash-tinyml-binary/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/flash-tinyml-binary/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/flash-tinyml-binary/_cm.json) + +___ +### Script output +`cmr "flash tiny mlperf mlcommons [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/docs/TinyML-automation/get-microtvm.md b/docs/TinyML-automation/get-microtvm.md new file mode 100644 index 000000000..54ad7bfa4 --- /dev/null +++ b/docs/TinyML-automation/get-microtvm.md @@ -0,0 +1,162 @@ +Automatically generated README for this automation recipe: **get-microtvm** + +Category: **TinyML automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-microtvm,a9cad70972a140b9) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-microtvm)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,src,source,microtvm,tiny* +* Output cached? *True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get src source microtvm tiny" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,src,source,microtvm,tiny` + +`cm run script --tags=get,src,source,microtvm,tiny[,variations] [--input_flags]` + +*or* + +`cmr "get src source microtvm tiny"` + +`cmr "get src source microtvm tiny [variations]" [--input_flags]` + + +* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,src,source,microtvm,tiny', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,src,source,microtvm,tiny"``` + +#### Run this script via Docker (beta) + +`cm docker script "get src source microtvm tiny[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * *No group (any variation can be selected)* +
+ Click here to expand this section. + + * `_full-history` + - Environment variables: + - *CM_GIT_DEPTH*: `--depth 10` + - Workflow: + * `_short-history` + - Environment variables: + - *CM_GIT_DEPTH*: `--depth 10` + - Workflow: + +
+ + +#### Script flags mapped to environment +
+Click here to expand this section. + +* `--ssh=value` → `CM_GIT_SSH=value` + +**Above CLI flags can be used in the Python CM API as follows:** + +```python +r=cm.access({... , "ssh":...} +``` + +
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
 + +#### Versions +Default version: `main` + +* `custom` +* `main` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-microtvm/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-microtvm/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-microtvm/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-microtvm/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-microtvm/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-microtvm/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-microtvm/_cm.json) + +___ +### Script output +`cmr "get src source microtvm tiny [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +* `CM_MICROTVM_*` +#### New environment keys auto-detected from customize + +* `CM_MICROTVM_SOURCE` \ No newline at end of file diff --git a/docs/TinyML-automation/get-zephyr-sdk.md b/docs/TinyML-automation/get-zephyr-sdk.md new file mode 100644 index 000000000..07c2df73b --- /dev/null +++ b/docs/TinyML-automation/get-zephyr-sdk.md @@ -0,0 +1,126 @@ +Automatically generated README for this automation recipe: **get-zephyr-sdk** + +Category: **TinyML automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-zephyr-sdk,c70ae1a7567f4a7b) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr-sdk)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,zephyr-sdk* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get zephyr-sdk" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,zephyr-sdk` + +`cm run script --tags=get,zephyr-sdk ` + +*or* + +`cmr "get zephyr-sdk"` + +`cmr "get zephyr-sdk " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,zephyr-sdk', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,zephyr-sdk"``` + +#### Run this script via Docker (beta) + +`cm docker script "get zephyr-sdk" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
 + +#### Versions +Default version: `0.13.2` + +* `0.13.1` +* `0.13.2` +* `0.15.0` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr-sdk/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr-sdk/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr-sdk/_cm.json) + 1. ***Run native script if exists*** + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr-sdk/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr-sdk/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr-sdk/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr-sdk/_cm.json) + +___ +### Script output +`cmr "get zephyr-sdk " -j` +#### New environment keys (filter) + +* `ZEPHYR_*` +#### New environment keys auto-detected from customize diff --git a/docs/TinyML-automation/get-zephyr.md b/docs/TinyML-automation/get-zephyr.md new file mode 100644 index 000000000..e36cd805c --- /dev/null +++ b/docs/TinyML-automation/get-zephyr.md @@ -0,0 +1,132 @@ +Automatically generated README for this automation recipe: **get-zephyr** + +Category: **TinyML automation** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-zephyr,d4105c2cdb044276) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,zephyr* +* Output cached? 
*True* +* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts + + +--- +### Reuse this script in your project + +#### Install MLCommons CM automation meta-framework + +* [Install CM](https://access.cknowledge.org/playground/?action=install) +* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) + +#### Pull CM repository with this automation recipe (CM script) + +```cm pull repo mlcommons@cm4mlops``` + +#### Print CM help from the command line + +````cmr "get zephyr" --help```` + +#### Customize and run this script from the command line with different variations and flags + +`cm run script --tags=get,zephyr` + +`cm run script --tags=get,zephyr ` + +*or* + +`cmr "get zephyr"` + +`cmr "get zephyr " ` + + +#### Run this script from Python + +
+Click here to expand this section. + +```python + +import cmind + +r = cmind.access({'action':'run', + 'automation':'script', + 'tags':'get,zephyr', + 'out':'con', + ... + (other input keys for this script) + ... + }) + +if r['return']>0: + print (r['error']) + +``` + +
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,zephyr"``` + +#### Run this script via Docker (beta) + +`cm docker script "get zephyr" ` + +___ +### Customization + +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
 + +#### Versions +Default version: `v2.7` + +* `v2.7` +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr/_cm.json)*** + * get,python3 + * CM names: `--adr.['python3', 'python']...` + - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) + * get,cmake + - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) + * get,generic-python-lib,_west + - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr/_cm.json) + 1. ***Run native script if exists*** + * [run-ubuntu.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr/run-ubuntu.sh) + * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr/run.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr/_cm.json) + 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr/_cm.json) + +___ +### Script output +`cmr "get zephyr " -j` +#### New environment keys (filter) + +* `CM_ZEPHYR_*` +#### New environment keys auto-detected from customize + +* `CM_ZEPHYR_DIR` \ No newline at end of file diff --git a/docs/img/logo_v2.svg b/docs/img/logo_v2.svg new file mode 100644 index 000000000..fb655c627 --- /dev/null +++ b/docs/img/logo_v2.svg @@ -0,0 +1,6 @@ + + + MLCommons + + + diff --git a/docs/img/pages (80).png b/docs/img/pages (80).png new file mode 100644 index 0000000000000000000000000000000000000000..0ca65735a705ff69d234b02ad9e116e6caa61d0f GIT binary patch literal 242952 zcmeFZ1yCGYxGp*|h@c_3Lju8ry97;w>)`HAu;64va7jXNhXjWSF2g_)G`RaffZ!Gw zTwl)&d!KV|-8!dU)xA}(>g}##XHU;`_v*j@^~?8ttHV{4AwijA2W2=pR6KHf?tPKUT-2O+ZdPU4|tsEX7F<|l!l z@N}OJ6&FaXrmM)}qf!RDG`FJV%;- zL@C~QmF&~+bm_W0>)L$5`^SRv$xUpA1tiak|^?z~AY0s|) zcx}q}HM5?6T-@k2@F%bNB~6q>xS=hya~`T4aVb}H#%?D8)H5N$rw`$+%1w|vj!<<5vF0J{}QRY zSr#mUx^a8XW!nr``1@I(wyg9?;n1~S(A{(Bd-XdpyBT%zyFtHw)N9{ zU4gH&(^c>_?rT1GS0UJc;D*z16CR9_9XcSubL#@zFBm`R_OQtv?OS7^lg}_c-ucZR zIP;|4U-@?O#}K$&sPYfWKZM78B3KXR6aP@$#H0Hm@d@7`INBK>_c2MhmRf}ldnl_(@L5Xodo{KvVO|&ir6Cjwd_guae>B&mIi0hefOKAZ`WnuQqj561^w1s zx>Th>H4=z{x2x7}L=7N=7)R;aTu654VBD%F30BBk6O*22eMvM56qek1B(5aGA@xzD ztqR7=U$slHv@q@79efKh87{vw@oMws=95i4kB4Vea;;4BQO+@S#`+K0altWm?FOyb z^Lfq>-ncXh&EDL38~e@aN8&H?Kk#GW^c`}ozKOrw3 z7vc4W8@6gMc$nVVeYA_=l6L%7`iV_k-Df;C3>%I$fi}l?)@A+`1BxR5&vr5{hYFBHAPQ0IBxu21vG_S&^^7`r0C+<(epLARr>p}#;A_~66 zZ>o<4cU5-<@1+E12Zu`MNS8}jwkn3(Feel}ch8blK@@frfC~wDg7aQ24joseS7A)? 
zPCcfSvKsW-7x2cvusQ`9JboF^ldzxIlTeb-NW)}KgN2O|`{CPDlQheyXJW6IUbuXw z`*QfA`W5!eUit2q)34-S5x=T^i7OxQ$)ro5v$cCOaqW{AUF65ngoBU0A4TbW6g{+J z*sYj45?MY8vmL#lN*GT#N|<8}P-rspS9-4c{UxnTw%UBrj}khmK`ly&o-iwFOIv#*!FxhGn@m zD-97%M6!{kgb;V_jWWIxhaAdWgHk^^%rVZ<@KM1M?z;Q1E?>=|qEpS>hR1+EWNIir$7WKs@we z4({XYd_QUiJtA`3!WYwiH&$?eC77t5?sa9W)2mC|!uMf3lRUeDSrn8h72l#RN7!NA zK_vGfE_~QBzq`-c3cPaBmn(9IDs#M*pgyVI-et}GN&RV^R=t?(um?e%JG5n+eH(M8 zW2^|Gs5`xOL*xw=rbxz{3K4bDfP)`zyu3AiM34CP`wq4d*(dhZ{#?&fgG*~0JGY{Vgzue6nlcPA zSMa=M?qV+Du@#zPK9$&4h*DJW>al0#uVFrPPjl6GnDK3tJKsJ25*!#@Bdu?0y-@$y zR?Szhn6@)$@0Z)4YA*^nC~Hxv5G;cfHG+pIb`P z%9}KsVRO5B8P5%EW~b9{2tyHsd$lXdo3E!v)o8S6o|3>F>Y<02Be><_SerVq{MY>} zKf-?K$zaGV$waoDFX)MrU25+x4$T(8Zav9+B6nzV_+*QG(y8)Fx=`9iX|46f(Je}g zo{Aq8wV3==jXI9{hCX4=PZ!^R{ZX1ztXBHkXvtJlVbrKl@qy;}TG-iUz zrc|?lt?1$n&*5s2P<}&1d?XO(H#g29cet8Ear;{lj zx;`eDBjtrsBc#JjoA=krL{?sxB$@$)7Sm+Rh{86JN9U8YtC^ z_&ho7lJ0NM=m6h>%Sl$F8;p1nIBxMie`0_NOqOBolijU%rkCtL_66~!*O*E2FMW?#Hr<5QC# zE#t9V)NCTNVj)y4zI2zL&kN6oPv@%rod{Cyb3{$Y9*HBo4&VGGzPlG&B~HYM$c)QO zRAy4-RU}u;%ydbsKQo0#{!C=zAhvvP`RTZFIexjgGGOv@l;i#*#U`Rg!&B&SbWiG{ zQ}oX=^?g&E ztD*)_IMJBBP*Vhfycs~Cz+e#Q6xbEG3Ie%51A*3!K_HH&p^VG2LD&%3!YE!N%vg0S|O+1%?F2K$oyr9ZDwG@52rCJHN2VdWD9F|VvPAaMt61icb&UNbr@Fk z-Ntbx=02O>QzIWw<5_Y)B-&?v@;jUO7BLH%_lGuP;kO~dwUF7iortvQ(Us#*8t#4S znwkBQVMU8YE9X9ag8ogXqB~otjq(OCKJ@nD+nGE6-ycArU&J}l|9p~=lqUb@mw~}y ze8>_Lz1_oixc&G4gC5dN-TmjoW9cgVe?Ghjv!?&&!+X3z*?&IJ(EfWNz!?9ZGu``K zcm6D@fQ$T0@$(Jwi-S!6(?5_a4Ntv)p4_Srfto(jw{5H&()&% z`3f%i)x7vPysijVi`u}R+(n0oQ1Q#<72JT+%Z{zgCJ2=6pHuBakFGWbuZ}LRP9axI zS7$p{r$!6$xki`k9CFR)zxp5nN8i&M^uQ-$1<-#S5d>;p9kyPay_zDQI5Tpu{ST*% zp%8riyK@uWc=!E{OSU7RlBYAIY&A7 zKRJEMzfKR>>fdRMbeMl5AZptD>kR)o1IiJQ|M}M${&j}`YtA63>v;=#^1y9w@c_i~ z`T5w`;o+Fp?#JR{<%|*_ts5f0%}TwlMcE=%7N@4#+oyEl4yXdbJNjPXKb6A2B`eZc zpnr>Sq!s>k2Ba1Kbq16b{&j|bo#Fpvnm~5VD2uL&UH!pjR61`FAAut&I#3pWI2tx# zJL?EI-|G%I?`Y6NU3#B^Kn<1aPzU}Cs36Vo-$6t%V{qh80)w>mrYC^E$&+npdU|$t ztT+`}N)icmW>Nvv0CX_Gf&>Hx++k!!5R#VO-+4$O>@ro%*^QN&CJ|u$+FbkF1&6o-}Ya3w%4!L&wnn 
z($`DytP5R-!MARL0{uO!wlRPk1byNa65`?#YJgpIh&wuUXt;2$l0a&OL|G(ihgoiZ zn}>NJ4!2VIQ4r0s9@=o=h|ZW_I1+z9C50E*$TOj~{{xK$Md>w8BtckHQ&U@8!^dA+ z%g^uW;o<4&v2c)_Je+T3DJv%@D=W9*h(sp8eV|8#L!o4X?h8;e3%%c48C8e-J!cXsODSW$_9dkPCre(#=~ zR2EN9rzAT)Ud+zk%cRhw5xYV=$j><(My06e>eCZYTh2Jdl@L7~mz1w?g?Gl{lh)zU8L~ykPAYGy_i%d4 z*z*Ux4`Hm8vXZ;+2%ausdiq~Lakod{2}R+25kTz! z<0SRm)pfkAx0e^!s7*y0Jw5f^=`p@-&jswSUS2HEe>nz6MjH85*O5pGhuS!JygxIJ z2(3uD(i0bcN?09GK`3AfmANRL!0FALEMps(sZkQ>c_s@zR2{<=KR@XgKOd++c<1K3 zKj;*2HYgW*65N6-wQe*0WJ-|#sEydw)ypL8uCKWrC!dj(S*&@jMB~Scp5xlc$XZ7> zb}e=KB0;{9MnilkYncGa2H`Ywj#95DeL25be+^6f;8urMut-;^ST6Q1_aV48%>`wt z6NjXu)(_Z}venJn!*^fEe1SC@>(m?xdu&dWn{}}3A8%GZAYLG^?C0vm|1&W)`Ceb^ zY9_#&%HE%U2{DMENv8pVqeR!K;FPtbEkL;F?qz71pEvzXQP6GUbJ5aC=) zt0$&hR#e0c>jSX#n`W$A`DO+A z8kYAE$QklN?KfFtrk7`Vr_W@nIK7J|xOO3XId9r{=@;XKReAX+GcYSp=Y4?;lLc)w zLiV>=RH||GPIxRoh(eS9Dt$vzBJf9*6@xg}QtEK!@R0hd4hA!VEYTdKnZ9SjC^uDE zZ;Nzl_S?-FWfD8O#e(#cq~?p}WAgcIAw`P`jqF?xT@>-*@z^2F z2eZ!o81Zh~Q>L(}ne}gj>eUhvr`9U(<4&rWC0~5>j3v7&s+Ecc0*rh2$*LC~V1NK* z13%ztipSza!h|8IOWF97ATxpgDG8fRM$x9Rsj*K0O{FzSp$=B5jC>dFJEF+@6kbvS zXQMUSaJ#w3rjtSnOPE!3L<8YnhO~$NBe>lUs9S40oOSI_W~p9;Xl*GElM?LBHjKWy z8Z-2s5}A}RF~t75saw0@bCi#Fu@G5bzj7VOo&Mu_-4%tC}Yj&s$js!z$M*Ije$nu9%=&}voFzz>aF`EChw z5%Z=39`Wkm9NQ>7s_5c;KDPPfq$RWI%H=G7XT$LU2sFx+;mC@Ci=&=Ot2~lJLJGuJ zS=p4i==tb^XM?jl7pu8*mxsBrd$YY|HVSJbLCTy}!)1It%B^Fp%(#@66D2xW@zGJ| zmq+yUZJJP_-7T+_*l)+P1^ilyaB_8i-U`cgM;hoc$d$w z?Lu$m6rrUzG%f0&RbpT$kW1k6n2rhmK9k|zvkfXyO$}T7?OdU9e#E1lrAB7Qts#RB zFN#oApAv9?ouzkMiT9Bz#oKa^^vlDAjIWtH+=w?+J?mS@>0D|%E39cKdIOX-F~|{c zkeF-a@4PbJeD+HT?oj9p-*5zStp;rf2?C@1$7pdPhY@Tl+$r{Z5y{htHCV+?y9BtZ zbo6H>4o8UOQGTsmfO`T7R#K+;>&F34?YvGVD=(P(rw(9eqfrcuR+bSgy6y|pk$Uld zU(O!|!yXG4+QA*x9Mg*l6Ps4`rZkrdhP|uvu_uL;y}I2Np0XKz;G7}?&ucX=A!DU@ zK$GCZ!FpISW8cxiL3H%N!Ez~O#{??ZYa3**xWym}ZX`&`7u7>MYhFQ;;AHMwxzn{f zvoJs59r$YGl=}|xQG?fDyZyzKcd<|2H-vG9p(wQ|E>L&wV<2n#0pf^ap*Cmv{CwY( zjEuCjj6NSvo3-6V59Oyi_7+Pgi1>Jt7aKEyuaX1X`ueuF`;-{R$_MM*)(oVAMipL> 
zRLm!B2D^;<`Un{|`=1>iY&7T%K3F&}g%Unm+sfcG?P}xvu(~uwUSn6?XE%F$vW*Vw z(~%QBv#KASiTCOL+PP0S3%JYx`ua=PVK8tzb$R&%AN%0bZcw+F1f|W~yZwZi3W&U1 z(WD9Qwhi)fw(4?GT)vGh1m_!+3wt0-qW!ushDVrz5kWV%)bu%+btm*&a$R#gWU!1z zg|l9N9e7E~sjngOf>f_5g;{R$LeDa(6f9#$6|;nu=SaNr zi!D%3in4?pMuw)DX*YDey;^uCeQ+x$y?|nPUDs1Lc|Fjj%FVH;2@quVYiTVUHRN&L zSs(Hd-MTo7rGhsbH7ewrk>XPOUmUcAw)xov=XWb#ksV=qefX}TPgzPPB^4BOVy6Lq zR|7mzTU{vmTf1cL(pzQ(_fcbUhFb&|awx49$)*<|RJ0TJhI!@im_lseVN^G=CU zD9HlCsPXu4nwTR;(j=H^@d#bV>6J=XSM49~Ld$L*gm8D&X7@ByfrUD}R%{&y*@5Bg z^YsT=?Dt>CdU9k3JW$zno1+W-+BOkTd0LdlTow|lT7n7*p1-yART)VEc=YanmOot)lA>mX}ROjcf7HW3)yf*2J=Dof~ zT?(Qnpj$4w6Q6>dtEjm2M$L{n@kX2CagGV?+zr3`qH(%xD_0D2zY=)#wKaxKto&bW z4op6@gk3iG1eY?k;&*kZERa2lXB8gL0L=X85T|WOP*;dH<{MAG|8|+!$sZBigu&?* zGw%MNwXm)8_0jF^Vz7Jd5^?YFhi3na!>x_+QDp)nR@`)to_ z+HW2;E}Gk{4d#CNLh@Lk=e7e2OMgabN$U=0*r@n0>;|l;=XOU+OHfcti+gPquZ@$IT#6R{1fwbI@r)YR$e z6L+h{rrNoini-f(%U)mERjaFDyW9Q*>Z)?1?WjA{RH?=X1yX5<`Qyiv;E0bt0?wU4 zglc&@^(>K*_{*0dY?9XI=sB}rwEknTSr8_eB7MyRu3gPp#tNT!z3PCjjh7XT$yV%J zGjPNSU*FE(%kZP?o3Pez7zx-V?hkr|5YbLtO{5YzUQWftr4kW%{rh`RgWi5*!BVm) zbB>j^f#06Fd-e9%4Uoi@8Zac#sxaq!eSH?XkThH^9XUT7kPzENiuph$-Fg5$`CiwVY*7@3?pnY zR{dZKc+il!p2X3{)=y5TOOcj(uv8bFRucc3gMwQsws^IzHF&@p3jGden5fRj(l@@Hx3)>dgLNlyWEbi@pq zudJ-*ct0HgAugoCH2GLGkM%UaGQ|e7Gil$4e|LOFMh>Hr1+{n)31*q85$bZu2V# zEbeib^vp`#1e8_n%Fs#JJ>>!PNIP@TUlpng3%Ln->k_fSuCS@($aVmk$qt zFvRx7e|R`9ZimcQSI6tp*W$~`0ZD7bd>P3bE#Z80{gkvW5Lzx?ZK1kEvKqW+<;ZVn zVr9v|I9Kh#BpuOx+hxJ~6LB+0h%qlZX0AIe$VD+0+EK7`{OddaoXDH64X3|8#&nG4CbW$pwbl5% zbhw8Wob07zXY!jd(oy$91lUV!@L2h$FeQR4wt{ zJD(#WBGZWCq_W!C!~J}&mdweuJ7xo%!Ux*P%Ai#bVvY=9&t219E*F_%e%5R7WPiw_ ztQ3Ee6~|Gw-^gk8b5HOujo@fV^w&+_ixK7Fpi#~cHaEhsPh`ohDu^Uas@>Vx&s z?nHrp6lz;lYR8#_u2Bn_1088i;L%A@PvaCGhK za%yA}-?1YGI@+3cll^EfhfYIG+$$LV{d)AA4zm%>eWIFJZ1B7#?Do<-;#A1amI0&@ zM)bY6tqqA(;9f<0HEak#@}5|Ti60vCswZp^;*Nb(I*M=!@o31f%a~X!PtRo}r8tkh za&dkp{Lb;9^i}A}_Y!{rYTR~IuDky~0H*6z;$BO{Mm@bUk8`||Nt`2?$-G>mMb3hF zkf^K$W^_f1O(_OPMP{;}Xh=uSf=uzPgu#r(4or*~YA33{qM0A{-r`HQe(Br&=g+TS 
zf1FPe+?23vm$55lvRZGO53qZ|UNus|UKJ0ft-j0w|2(A1SBj3!6w;*lF+5z;gd2db z8nW*0xKZfVz+Hy2iGdZdyO!id|#(Wjuu6(!m@jmkvA?Xjg$UT|m?YnLbHXJEI zpo{I*#OBlPK@Rac#KxSTbN+UXhGLZ}GP4B`Na^g(O?usW###69>O1AFZ@t~E5=!Ux zR8+#}G^V$i^78sJ(o>2oyITX6_;DA%41etVsn8 zzrCI*JEg^9E_!`M)%5Qx{A2j2P;A5d2$G4&Onj}dQ-aB-vK~oA=OYHHZP`FSUpTnx z_AcBW7QJ`1ey~yNWYp}q!}D&f>i4q>^(YHW0nO#Wb;=U(J+IM&BKwyChgE}-N(#Pb zTdmUidSFy-_jCJ0*ip3*wuSE=8a2_iXr*N-yR%z)(l>9sLAcOBaP)W-TyTGl!a#Ru z_V6~$bVc{!^x@$a)YECsakg*LSRXnjG`**FWPf`nh7twODk(c>iuDJi1t3KFU8q;e0Na<#i!N;=BEmO)ssM26i^OyYQ(z;#Hjs1_C)$UeS zo}N~WW%-kKcKhXMqH5Pi--O;x&2MDK*bcnhM?1!D$dvJj1dztJLGY|RZ(|kD7NC{67^&}3kqy)6RJhR-FL#Q<84#LI!rS>hTn2iru4O`};UU8md3re63n zKO+TY1)O}&VgqJo%Zk%M?nos%jl~6pgKsmMmI4~za9zjV?DnvGd3d&1_yggG(!VOa zk_)6s>@=M0kTl+3*Ru2~d&liaj^*VPBPSkq16v-oyS$b%K2&1oryHTTh=l730r~|w zD7hG9zy&r5`5d*yRDP$qCa*X@(S@MO%9Xy}zA<=vGY9jl4AY?zXntg8+v-v(|UUDwK!R}UNAaO1JM$7EwM61>UBSf0&Kixt2U1Qhi zmb<*{tTQm^R~1k4t0v$B2LPxw&YW-W@2?FH54+bQxzAsn_}5y0>zBb~=(JpZ0uSR` z)|4%gZ)Jz**L!Sr#V>nWSt?~_Bqv|fxBu17-)=}#IC~t<)K5+>d!qAqGMY;Do+iz1 zw^nxj_1ewN+}w?aBkvw?Q5E9rYYJqcBRjH?A&=hNz<-*SBIM&?YwLfx+YPFo%^KcMs+&hL%01JHTUi2U^u2Am~y)q?Ki+^ts^V?#p#wDJ+a zW}&!*^cX?eG)}sBVs_ud$#)NW=LnzoSeL2*a|!f5@8}S*)m`crFtsV>V@o{nSX)!pGXp-=w7)&Hs zF*BuW=znPMCCI@;N?^rLmdMUa);li`oQOsdkjsx6#rU6mPdB>S4ia{+{bHLFjbdCu zAh&^`pzddj}%}-B%LY&MX4R1pC-y`FEI{j!UDOR|gCHM~zc>J+PD=u7)Kpxw7G^XeRdSV=AB zj){qhii#2QJw6!6rM8rcrcH~VPe`Z9$1h0hk+YUi5X3{u!eN&HUcw(8FiBxvRj@CR79S@h%)xxP9h!^fCc(Y*x9 za$njPY$K6O_Z94AaVi@sFzFwWA*VEGKoKpqIPII6K~`c*qw0;WD`Ta?o-vfR*4poysND_4cCSUAK^NJ40Vg6h zJzZ<`j;1Z*OE{@QKLr5L{~4FBoLVfRq!1OWtk4QALZ_jDqU$6P$ovRwAMwnzI1`A9 z#2if*hi#D!ymgwJ?YS7lOMSArMq_YMZ z(xR*cjHPXP#c4Jz69Oefj!fzAffz6`U*{uz)Pkj~^nXsI5KOkRTxO7(T8vtMG~f>i zK!9UG7BXxJ=&5pCkII<|wdbp=ot>+z|8K+}!opCfu<&jd`l;if)HEt5$}wzsnODob z)3r{sznww}7~9)*FMP8I7VP0OGqbZZJx#z?Ufx92jEjwpgM;0R@_fBz?p$i?G95zE z*`A)YVQ%h&Ls04A{NU6oR4JW|THL*;Osr7{2J@yMKLgi(_%JxARFb>#p!Bt2@U|#N z!;+G4&H0ZXaOi^vyOrB)<4omp3$rxaq^8Zk*)sboae;f@+M7jfhVb5ksebCq<5Xa= 
z*V-9%ZNO>Mb`9`@N6DGZUatOP7>A~TjX=ZFE{vuN-aK)l;lntjV;-?alH?Tr1sowH zx#?|lhW73UKjTJ6!^3xX!^0abOdqha&OmFwer-J8oZNdo`_(&x#5DH3I-v0w>7JbO zK6!0d43o+)Zz_)|Pjs58^S@}1))Hrg#+p8$$V(qA!o+PlS^gg6QOXoL=!>%)t}Wru zx{OS|A)%O~J1lHJTs+dURo5R>uZZf&!;YkjvNm-MHK5r=HX^0PLIzKoyP0pFR zL-o)>C9w&LlWbCa=hXPLXVn|!FsBbZyqzhW?H>WSme44Nd z>mHAs+Sz3{a^zXCLh#a!@kr7#-42P+94?kSX8lXwOOHOGCqyOpNqMZkQNc$5`>ped41hvYQ-fl97A<*GP5^tpxunR|< zokR$i-x1=UH1df3luRQ~bB?Z})00jUm+aYr$a*(n&5h6s2&^7!D@D;e<4ApFpg&k-gsjD9i|lr^ayR)% zR04(N&B+pyg4tMVzXMGc_d!zPpzooiZc${>Mjql6hLEf)u`B@3y5{^-z!x*ow4nv$4e%* z?NQ2`tS?Q(4(Ds0t`9WxpzLN%R)S<;cY5SJY3p!ZDMJ~%WEPPVuw4a}NBGBvnP@mu zke*!Z$jMImTreY6@RnFgfNDhL32LNd=Al)HgnDnF-7W?T1BP1qNq!j9sbtXN!#M+` zHnuH2RVB43!UZ}kKqxmey91AXsHD`yvOf^+#NnuU7~i5Gdasn(^|MSJ?dg|-B29)U zZ3bhvBqomj>q=}0_N(MO)kihqBk_VhffhkD zdLG#c)ifuZL+P4hsXdotk>#ZT7J8(Q#_hsQekm#65D^s>6ckNAay)490a(FINJ*=+ z)5=W`3ugT7&CP8px4))Lkudg7gqo(HjSXK?e7ag?ic=_84oAdnmM;mXFurdM<4^y- z@67HpSWh`vNp_dkTfPqiJ^sqk3b;%k>ap}He(T=a{`w#!4^Rad7_AcZMOvl{aLm}` z+oN0So{w#`omzzBc+sx3>q%oUWYjr+hb%rMzR(d71X+1?KMK{D`cn7$B`{3YRJM@O z4eSZ`vL8CFn317FX5F>LuN2ESFWCs7!OSxIw0#{xG8ptFmC&<%@vZI4KP`ayT5xyI zzDq`1tlRhY`JdWQ`hoY8y`~v!O!WPV>@|g8-kZ=zx@t~$6xDt;T93^lex!Sxb}2uf zoi)o#lr07yw1lJa76XQA0W+ zhhWCWGZNVgcxv9CgD_5A>UQ2-1*{L3K^ACPbQbFsQ2?c;>L8FOa9>?SWai#zYfua| zHHmyZZ)Db9?qbq}QO71bwaEw}zLdRV)prSkete--uuXuJ6$8e5zRo9XA6Pr%e(dVx;VNYBFcTz16!H~g4Wsn~pIv$KhCjBr=EDsZVtqY4ulDod z@ar1-*zX!;+E~9jJ(#YA7>&am{=?Ne;;Fx~mTeAAdlx#YkaCU~G>+km+wKI`dPl1m zrUl_+vKn%|yK8w8rj`sEnEx_dod4C6Mu0=;d%8z$WG?5sEQe$kG2nEXLg+Pb-bN?S zE7t9PYH&ZH^UBLt&dkVQr}}7wqS}`J00K^K84VVUSTxGHK8vC9d!%r8S3Q6I^su)# zk%OOqV-{npwz3jBt*oAvkeon#s>qQ}yj^wA<+1cqTiv2V(=5!p8hW;_31(F(iOB63d zO`qe|@>D;*)7GfC0bssWQS{ltO$iDb%fja)EpCZ-$(mTEj*oYr=SJ^3Egq5VM`r@% z!{;*cTwE6qPqXeDIPK{k9O2^2*P?xaDzX5wNAqnH`4;ORiYEauX}|zAtFkPmq^JW# zczDq5Urmjv7S^_GX|-19?@_zp%MbnsT%gVf;n9NpBu~P~3DZ{6MPyNQtG+Z*pJj!aNkAITO9lpKis%@)F9D$Ds`5WZ zjIz~}{#O1bdBNltUc6IOqTn;+VIXv-QUqk)s%Mtg4L3!P-+WL>US 
z7d3q-r=-T;!hQ1%UL`=}D=KPml=A!t04}cUNRogOu0_xQV}`V|bEoa)D0r_hzNYDP~UC z8d@VdI-PN;VfkJxq#c+dWc^Aq2FnD2rWWfj+9ttHB2Rp_S8PzA__{5v#L7M*{SXOP zLm3v!3H*c3yrv*7@_|CjtPhqJAuEXqRN}L- ze3k*RBw8+Txva)wFo#i-clDcOuK6EIPYFt*E~PU;`!$!1!|cPg)MCDwrP#ik6W_j- zn{C0la&zhFb91*YdKwP_pTEW-m;j zmv5XHMT^!y+lz{lM{rN&THl#f95-52>@G0N96%a9a~bQ=L9NJ%Tq6Dyp>%e<`}XZ( zM`R)rt9LLmm{;n+6V5IZ7YCf~*E7@&+T!8IEj<5@UPM!DuZeZsCgFrtk7IN?=WUaMj?aG2oJ#NYMkRpcgG)iQ~s2ZSfh!g0>s4pfRjA7MQ5Jje)y2 zPF~N*(B!;rw(M%!YI&xZ-`?-F;l^3voaj*nUoUh!1n7xI*jUPVCdr4^MgjRP-0z@h zTMJnK(ieFN9gY!hZ}jBnH)@)h5t(4Ll6q@l`blRgac2gfe0*u*K^viH_XHBNnEMFV zh5;B^78FH>j4)udbV-{>u)L;}9ia?&Rye0>$r)8Pumv?uAhD2y2doK3AN|#R?ZE-R z6ANkFkD1le@nD{YysfM9*CasSs7Hh@z~FV=?Hv)m=MI!s@oY?Drj>6=PU8?cIxBsV z{?Os3H@h?Sk;I7EXqfZYi@#wLYF2imWn3p9L5Umc+U~e>mwhZxTznJ8W<#C8tUyTsToY7 zaYq2<-G)!sb_ZBW(=J93fE$7|F=Or2U7}kKXG2^0#L;Wf7UByQNSlcw}ZgmMv>9I0EO zhe)G!;m{@)+Nv*eS3)`hO-J6^@g_XlLTj*gm!A(5yqXXZfQKiv)?%UJexlRMmDT|t z!_Q1V6c|2l^LxqpH&V&@7Y{b9V&)L3p3nNGp`olLN~bfEE;u|Opj}2jM;2ThpB&*6 zqH>{=44-ldSsrbTFqS*Fug;I&N+#ZqQjTQ)7@FzdQ0sHxru%W~;GHSV4_TKfWfH^R z9YT# zJ!j9mA7;@0fdD++cSjS*$Q1xTQrrxz(+Lu8zJz0X0&=raX>u6=6%CYb`>KPLy@ZX` zfoFgP%@E=h^Iz8hRyH}4h^Jz@;EKN>rOiMiHARzL=vNW&;qv8QbJP3qf4J$jg3C@^ zm){*a1Luxa`KN{8VsVyK87xlko#k)RX{G2zWcauaF*~izU5C?N;I}QUPk_Y*iMB6c za$GuV6t{GT$`Cnv+f}#>Q?5&0)pp>r-|z04!E&~Z`JRytD|b}0mdRPKGe2%5?wfS9g4es-%e4jJDgYLu;sFb7-k_K5{W+3h zdw)zC6v?TdNSV=tlb4X$$$@c~JX$`HELsU1+%`WhDUjFW6Z#<|N}|M)gP8>DZqv6D8n2w-m4UT@{DMU-Jl& zJI+G8D@dIEk$SZ%5@F8jZhfzZC52uiMhG#-3`)&ML{1s2GWMwG*LIJQM!+bB=O-)Q zS7`a{Y-;>cWmCWw4^-M172XQFhwh@&3?Q4-Z01+N43eQ<2ROovdhc9NRCd1CSqBjn zsuzkJi;2k);1u#c-lCo(cV}6Qw_obxIbsEJ5AJEbZLW3hHO>tT4fJzu#d`7o>MPVb z2T@8qkXtd@1J@m80(7pWrCOlT!C@_BWnf(i=>1J(Sp@nOI5x(|H#WBJct?U`co>Kd zpn*(O>I%GNi>k5z%Qydnas@aVxgG2MZwN+oeT)79z$Ra_HJjh~8SQuj%qV#E z+VcO}8fl3CZsj=2`_b0=_r(t=JN)}f7^E5gb%y`Z8UBu0&UonV8m%E24LEB)?=A*k z?QvKnq@lRNCK#}6aSLEN=Wwu)+ix~*9v%S!9v<|Pl7nfK>;R1?1C6Qag9n~F?5rd3 zh(~&F&~GppfgK`;^X>VyEU96rwe|S86<99~tR+|V)EnYLy=}m@8~H2r+9u!;vJ>5C 
zGV8#=h@k|C-9i-z&`*Ov4e97W=~jN1_dep?U2j>@Zq&+iS{i$>G(XM#h-_{woA zKxYY(aKb}LR|2U9`>V;#;EFu}j0*z6o+G<-fZuLK5yRHan(R@!@v60e_#e3&v~^8O zi$2MTzEE%enrWwr4C4|IRQm=B%mCiN;D7gACXCxwa}2qD9&{OSxXoA4q7~R+h5j0R71IJ)y71THw4N;<7L2NJejI;ep`w|Tgq3+{WRAMzqfuc8C3KU z$&KCuDV-zWJ-NW6JG{0}6P{vPZni7jY%(>>@=#3=iLrvR&{_o0{s+erIEmSIbjTEP zw*(rN0VEeW*{-!35>aGvs!kQ#<;d?q>1Y)NpeD|VUK@YD*-jRR+%18)zMmcG0vD6{ zQ$UB((6QG`=S#}Vut9|^oj?OND6|JO0ANFMavdGHYrt?$UbV7AzgY00et>iRgK;9F zG6VTS4>O)@rDh%Uch@?Yf$wLXv{3}avrs{zrUA^8=hL3t61KH?AK;$m*@;^H7l19yLtlCVAU zbTpLc;d~o+tKf7S36Dzo;7*Nt1I_#Us45Wj%T7rBC8w>FYum@MUp&$+t*|84YGbniJhwlUGWHIKz-wgS{4LU;1MB%gCY$lF!#JsUgXIJ zw$L^WbQ&yL9#p->wf=m}PXYOjM8iMd_*Sssf^MM-N)!3BE&A(xPEN zjix&_>N5Ul9Sun1Vf5$RLbpLa4%q(O<+MD91+@(W9#Y!}+Gj-HmoaWmp&L(9U%(M= zz%k_Nw1xWe(CO+d;A*=0VjlOZkaamAw4}w?)FO zXQVPW8%yYp^XkiX=uO6(vw|6K=BfCX5xUg;nMl|r(}eeaZw8#Yk}{8=G^D-345mSf zgQYwgUfcHP4o_V@JbZjSr1?CU&o^`C?jZD%lV)wnvr`JV?N!&bxmDLe%x@>LIK~R8 zYIy~Rqj!D&oOSyMuY;44*R7Rcmdvkw+dqSpH@_bN3(=`9R`r}QlaKl5pN5E#kKoO2 zG5y0pRfL^_o0a)+b6k1DE)~z!Zqm%)MmLGh`#Z}L7x!i|!_CFTcZ0E2Gd;cOnbtVF zgCJhf0&A`n`J@ZFai;utSx!^h0d3JNj={&!T{#DBm<2RrX*8CxgvJuU@&r0TErl#9 zuYgeiuM%Sv%;Hf!8lZ1|JSdPTux>ERl#M zfMaD-6%vxXJl3} zBm!fyidzfGIy#0~Q)OkVZH&q?j#u^N<9nf-2yBH})xW(T7!V5(mtp^{7989@^OAXU z-HYNMBXyFcPv|6TWV&J?5#XgZzu1mSX88H|+=b+1MY6-mf|+;BZCXpG9B3R!7jzt zn3}ch5J&~a#YhOIam;6xKSuc~ns=4N_cq9+M< z9-VF?CUE7qw8V#n$vF2Y0wx+3mb;9drBm%H3JW&ct{~@`)dWl%6Z>=wV-LypC|-GO z@3zp9Rwa3^&G2;k1zpXB(OBH-r&x544KKJpkpAm>U7m{l>;MW2q)V!V)kKw=U77uS}6op&Cra@DblM29PaQYXN%V44Bo#vAOL zguXF2Ki&A>()P<#+B45V z8q>J2{IV|VePd&3npp-O+? 
zZ1OS3M>CgXATfk&CRnT;q_*Oumd0947*n7|WiBo(VO@APCopN}jzyoiUGW5Zb$iv$ z?ilzCiB`0z7z&hshwFe&vXP|oE*Z{H8B3&DDgg(4*q`J4U0y#r>%w9$bi)q0Ps4|D zX2_-Di;sxF<0CXSGyR%uZf?AG z6|emAA3}J7!sU6vMB7rHw8jP`W^-=o+qX!8fBuPL3UZm#w~0Is(9`ZVckjAGQp7_t z-Zt{^NK$633-~I=Y=uaiR2CFZ@)}tp3ksrfRqG11N(&obLfytz=U%;{HB9=WEx0H_ zPj90m@wMvL zMR&QhsLc&WH`WWU7<)>zKAjg=lUyDHJ$~^rP1Yytk6Aatnh8~vzPh8a(9TSm#g-7c zwJXNmJWmVi%v8?;*H7Zq@T*CtsCV@8=a27yHY26+PMM-Lxjl41ytJkC1>qak`r`-a zNp&%?FDxlyJ35szVZ<8GxqCOe*x`fWcu3R-%N%S9W@z?R!7l1iGpW6I)FwP8FD5Bm zzVwXXST|*vhDVcD!NYYj6+xv{wOUV|L2JJCcI^`1O*w%_m^4?f&kZ_QwtHFl`r6z3 z`ogL?_o$71gYFb^V@#s;fz3bym2peMBX1b_IZ2&TC2Cbm;mEHUqy2O0V{mSEQ*XA_ ziZA{Fw94)HU+QU32mkXJ=QF12wITEPTmd!K57qmvum#`m+t-O}BoH$ndvcpCaSUr| zJZdY5K1sX7U&faEUSR)=k5lS@Ne|noYbG9%vZ`yV_p5K~@m0fqX|SM~9UH&0^fJsf z$zt*vQ0*kuuE21gxUD<35JE3-=qEe8i`54|O%y2y%zEM$8fN|1D6=Qf0S3&luyg6# zhq1AL_F1=Hzn*ZpkerPF>-rvdy3*lq!U&?zXmrg+g(1q;bF|QJM4Zbn$MP=~cn!Tf%7^70$nGd%q#+aXLXz3biDmik2R32Mnu4(xpxG|a!TPse~r)?~L1<8u9 zIx8~Dy5skqAjzxz;|kw*V0zK8g)F(A;Ej zui3y(q_5g1?EC(;>Ec0;1X0(dg9o|gBrm=tzoF+I6=oHSVf zHR$l6gSC`r)Uz5KqyS~Z7Rc_TaQSD`)D35>2h)LzAVRWujN3k}KyMbprI#yPb(@y~Ikq zmHf*!gClY30%K#z;&vO+&c}W$+y6pCKkpg`aJwL_;ujxGEw)?k{6p42_l!?(rL1-n zn&f58s%{H?k&{xe+vP7hd<)?+;ON@_LoVVDVxz+S$8eo?Y~PBAZd$&A2B&LL&l4q@ zH<4_fGc@zg8#a`4bR8YW=6LtVVAoc)Kr!F3sTpgKLMi8&YhSF3`x=KKp)41M>%zds zbwG3ZfRAIbVTD7{?b8o!e(g0R{e7{H{$CF0_xaZbYsu*31i!7N9;I0nCJmV_Dou(M zmxiQ->{?HmkEd=@85{9bzx5x0ukT0392WQF#FcTPc7(-o$78QFT9_2(J1!OrIx4qx zr9%^ZQitZ8bJru{PpC~qQr^H!mQbhQkc=h8gthx*xSb0^!*9K)!)sl# z+Ud`4q(K*_cD-8rstC$PufR(|f*#0e3WRZOPRYBzCpu&6TZaJnufrmq1cdbX` z1){ktCo=5cJa+0LX1C|RzLPnWV zuBi`@R$sp=Gu9T}!L^RBh@%&tA7emX^sjTF2uubLc{^?=z%1(Q(50B_ETag?U_>8Y zB~oa*4_cR$DENzLwZh&Xarup?*mJHEkYxKZz5bu@&JV0^H#=l)!c`;(#_fP;g06IS zqG-m44COggV&K34Z38_f;G5}VH&@%+=V-Zu*3p5OqMvyi6R)JQD<=U5D-b%bEz-@c zX0>i$ph=AJGf1q48%CC5~0?xwH0x$T40PN{v38{BBh!Qe# zkQiHPcW9KGEqSg3B9q=;BRCETREG0k?V6yX3bt~mbA>7Ig#r7j`oA807E+E^WsyW> zEsVD%E8~>5$YEXjmxBbEvTrXWbY3y&b2l%;53ky7VQhG~B_4e6b?>75IJR=cxDeF} 
z3ME=v5}8tNb(d+5F6@fvX9^boXhu)5+VA_v#l^=5a(#wAd=SvwyKhp7sfm2x9H9yo z94R0--%Zbd&A``x%~)$l`)Dd5>JD<5^%|N@_A(%7WFD!ig@z=;n?p>&2ApD)r*Ye5z7-Fuiv&l$=>rg;8R3XQu}J$=^4 zm6MY$P~YVB#ZWNhOJ%-TCArEt-J%-_+Oc9a8#AL#x&F&S)<2*T4wH03=q99z7Nz0k zP-5*McI}D?3uA1yg@vs(2*@JOJTp?)cZ4E<3qvOQFHB5LO*h}n6@%hoVpG#%{+#IRfIOOw<4n>58>fhW{d}QhYo^WTq3XzR z-~!*-bP}?YM5{~M5lF<^bI@7&b=dtxzxB>=7@(ATV_EOz%>NnIQih$xF=kClLxtYg z^$z(=ZmyLM*-@KKnZA3NuHYpNF!1vEspWx9>GKm7JhfJ8B2rdZ^I>b{_!l+}g|+gf zl_fSsQqG#k=&9pE>rTz}+o%syI>quSP#RR_&mG@Q|IzP-k=4Oht9Eh`iGMs*H@f%` zz4p>gyu{{Jf3>*UbZK`tdiy@MIP)P!t-##M82LdwVrz2^+OLFAckyTTWI(azk&9&g zF?`3sASTw(ps{pE(?8!GfqeN-gLnUoP&_hx`wJAFg$tr44Su&EcA4N;yb^R1FViE# zQyoKJwz7wiB_m{p*hhb3ru=HYhYaO4n=veI+i z?Qn#%F&Yv=prJ1E^L~u_MPeSySEs3|kuQ;v!I#IbU=bo=M)y2!&g%k#xA8Vl5A!2j zOre~!ZuVCDufxAJoZf)S1?+O&bKugL{7zsk6Q{9&TThp1g(^pdNrtjo$N7a zcZhbF?%rHFU6OTax9g3+>>wLEA+zJgoif)!WC&9)?Xp0e4#f#PJwBHfyfTWj6HZ(A z;(rz$5JmYC?x5J@kT$a4p^i*3Xx+U#=_VcPmzGTSt`8$jjx z%jCYwbilvgs@qrip?dc|r@_A5;u5a<_3ZEWJ9s^=7;8pMMP&HWu4!JP^Bx$?_0e7# z`iq=AD9o%{Jm1O6+(FdXYa(kLM&FeanAM#?)IuU0pF^Joy5e_yO6Cv$MY9p#^G<;R zM0MeUWB=5YUq7>UrLx1*5U6OxG$9wLR!lqLzmqXx`i?3qmyK4KcgCWJ$ghK#(x#`U zXfSI9oRl>9Lqwyy-VnX@LkI1TJtt5zx|6&bY+#mRY~h7qS&f1SSMhi_H9NM#_FF?s z_@*@cV2EIW**lU1) z_!rCpT8IDPQp^9T(5#)^IdKDa?8Is|Kk-#>-Ul@{<~B%)&)n%7hkpE@o-*Lx)!NFA z2JW~b@}-ka0nUazrKiO6#F%$QUzER3jyR|AAyI+ zbjx2eYwUrlP_)l2=f1x9`SXvjAe$Ii1UNx>hnZjYf4kws|9e-*e>p*cGj1z0boyWgQ}YAI^~Me(3v*Hb z6W#xRs(t;h+%vzb^-w``x95z5!@#?Rk*jHQBUd*!;7Wjcf3mE&r2BB~&!6q>KYxZC zYaoDMC@D!xD+NrnzkmPgN+P#+`0@e>pOhF1yxz+W_LCGq6H!u{tQe;G<_!wxZ%3c# z07Y-!-0JZc4|*93P=4hWj`3^$eT`Vdux8W>rWu`Aj;&Q|6){OJ^YL%Gs zh8eA?4Y>BuYZ&GCEuCf>qxVZ`a9^ysp|*0EkC#K%PnP$5r~=5a*xB8`Z)eA>29&J5 zz|{VUNf>QI&CCpTcUO4oB=d4!_Aj@IfAIir9-AGwgRXJQIM-@)Iu#FNyjt4ccXrOt zcXmR&p-SGJeeZ3!ZMn@VtKrT>I%*}#A3NL|&uJY_U!DdN{=_|scyu4EZ#D&;=Yi>^0mXKJJ#q8;_vKBLaktj;~$noIw5hWwk_P2j} zt<^@y#@X4&#|2_a-@0tAe^gc5T9)a{tShUL#b6PwVUgD3r_5h!5)L2cReYKInPXx3 z=l8L(PeI~GC=Di3yLGXbmcLeGoa@Qx#FiF=DKPkH6{4E?H{cu3vm-w()uTVqyEpjr 
z*ctUog~hA_V_q`@ARp1H=TLk#XrWyg|S;(@h82&V*!ipYKEr9VeVkZ zAwM^_g%%l_Rw0tNPy+0iT$2h3gB4dO>&W6)@vGGDgYL2s`4e!|nO2m%eOnV#`$~b0 zGqPa|$rsMniT0cOk%9J$+$!x@ok-IV(5S9C`xzk@ehc&HIGpWTpBhRN81&kJ)GBOrGTu8PxiP{T#|%I1npr*rK@6CF9sctgUMy?~)%xvKWJzHoC`re(N==aCJ9 z)|1T7M#rc69lQM(#$?pBS3IW>emT4@pf{{1x4)FAd1OThwc@*CU^qM2Bt6?2YEAI6 zZ1(P;KsmKvly!k6=Sa~Ukp)&lQfwigjXOLAzV-H5=L}6;9uC*7x)hM@SCq?hwUUkU zrn@D~1|#vRxhFJoWc(B_;TvXSI2w8<6(WpC%s(PK9<1>PGLwNMJv(&}D)GC1$t!Yxqg zW{pX^jop*R>0Zk@iKi%fciJd`Gv%-0_f{eAgc~obV!wYl#jxbs6b!h;@4VaXGa^Rm z-mb*wLDRfS0|S1mZ`}&rzNt8q59b^;#?zyxxvnHUK&0giiEu#+ruFN(zmR`4O-gAv z)_6#oU(g#$c_+cTrw~v!abCt|52ZOJ?{|u+sRor%E{)QjK7cK7a=)aCPgI~nZxUz# zE7_Fav-MTDd6feEyvm}|k4(ysD(2dF_3&=A%6G8h*NAC& zioeESV|g~zANg;&@s#u3X%9xFj{Eli0Q`sw2DC>$ZmyHGwD? zFRF4oNx@b!Ng)dqPKeSTZ+?^Xwr&HhQ;H_lE$~p@g*H~e!l|uc);u?tpFcNOJ`Q~r zOK;6TSf+9aoLSynYi(T^N0wB=nssG}i^FTSs$BDi?~NNbZr;36Cya7`A|fg(CMIfD z<+BE8P+N-VC=(Mjz#0MNz;^;Sjx8wm8T$SSWPy&@?>Y?L|KGOo^UN3kzG!?dqp9TK z;PSIKzr3M0p8uPya!r}6GP>x-9&T&vj0|h*m2!scN>S=zxafSFT-Z=C1#9ieB<70x zz_~w`8(FAgar#2^zC%CGAS5?SY-o**0f1zna1PsbKwk<>HUcp5Po8ub6w@BR0|=g* z>*$!9OLa`0AKrd>o=65@NdPO~Vc>g{8KwDd=QvQU933$j$J})#fi8~x&sTz=RQw4( zJ}%+Ix0ugqQ2h!Mqm+VAwWCD~10dJCqxWhzrM0!2nzXe$3KUX9M>kBF%L51i@*8!Q zO17kyoTAe-_w6I_=K4a*!!ogh9+ph^>8^*;j!DWS-^wn5T@eclVPOjk>ivKLKp~a= z`)3DXI)!q8ogTKBF7WWxj=y7k1-^%umlXUqSAV)hizdVS0K5mrj=n_Bc+*&V$JqGJ z9pmik7I-7^Df)~A`#gX?z1?I-8C=`blwI4T?>J@c`b2HI9xM+(?Vo+|@ke-%J4WS_ zy_TmYCMbniK2GLUK5&kxNX6SiR3Tu%Mj^m-MJwwor4WOmvT2yYM19Jj{K$^43B_EvsI;Kt3$qX*gaEV zp8z0EX3`qnP<(v!w}#;4dhcidVHKIVkY5PJqFxvbd_ue-)iKXBE*{JZib@OhGF2Pp zHFJ}R>FFNf-`5tPWH{9efCPuU1gaxs&AOteMjT!rlL5R})6+Ui00M@NgA|_KgF3p& zf3r@~jeEJn=nLv!zxNopgl9h$)B$MCFHv*Y@R1#dX8pvzrD8gS!knKiP!Z@X5b6Qm!|6B<3GPYc)CVfw-o*WTbTL{K>%6d&1EkzbxsWl1P6?@Lg#4d)iFC`^%l~QxyoskK7x!%NtP=7D&sw4b=6`(bAB+=*@V7c5CZRowio#uK{u= zt|niW5O*EBZf!LNtF(puR(Flg!|Iysx?R)L)J-ej3JFbSlJ>_0=wfhlhuc zfJ-w#cL{_)HFt=@iyAnOau;vG4y2=J-3N!QI=b@G8t|u+aY>4us1skdElh4&r{xJr 
zL61cRrw4cLg1A~ON7Qar5lr3#r1*1M(vM=D)BF}EzJ2Qey$u))jpplk`G#CUu>x(W z3if!a-atEMF(V+a?;QA#sWYM9sN4s=%&Y&OXV0OJ{#TbFa~1xx{O13n7`L4r)=H!M zl)ctQ`8MwYT3c`1tQiDQW6V$yRyws*`?=D7IG!UbUH}DamzcpV7RThpdb$_p%5m&7VuBdqR$`HkQB9yf7p@I3ai7x-|U^N@OAJj9| zSN!<#7<&~M2-N4Z>;Ty++wby1iGm+rIf2zD%9E32`Yp_BuE>An_1TAYJ7ICOu$!AMplt;>LV;E-Of{)tzv^|GzX{=y18oeI z3TCxH#W2*EXDvO)Ms={S-lAi8IZRAxYcWi0IMODG3pKgc(Oa^*{ZxNwWfA#-Zz;>|95e zvf?{$cNN@(3MrIOF`~S9Ny6>Nvp!(z+l84<-o!3lUjg(3jGv#oyPv{iAIJ%C@06dt zu2Zvcb2$TSfoh6gZ{Mwb`w%6Fua6R>G=R|(C*)lDsqP2C_6Yzq zm-}U{_y~RYT@m1JVZ^pp7aaR+yDmY_<7YXnodsZO+oY-cgD)Sr$aE#2Cd|`Mtl4pc z-z*kF(Sfs_Hjh8c{NR6liM(eId3NB7asXV>S*sHQe?7N;av)Kji;KD_a~9a6OUxqm zv)f&v<(YL*n_q&g>*i|6v9$(>*94+)2W#^5sg3#;oJ?Dr1!Y?1saFXN&6Za3Eh%1_ ziYw^?w-0x4YH8^T(`{7!hilv0Wsu*X`LzJlB%t#I|`Simewb4-_|7XUcSF*v!vK?uy#A`gX^3Ex3ZE#C3)+sk~)&B z+uk9l3Z*!lcy#vV?Ftp`CWmJw zys_5Eu1xb|Ycl*bor6nTDvpG0Obxd`$aV z(2*ktxs|vH7O|y;O&s~9Mvq=3CMG2%I&S0U8n!WQ6l>~afH@-F@0Fs}U3fsj{(4r? zA@-0h0)Y?_K`6D`YyLnmCc?PF;%Ea`0|#km6i7LF%L1MD;(=8?XIeR|GFD21gg%5B zwA{mmzDrC-n<{HENzh+q`!xRLxWB%B@W>+>Z@lJfEdeYYr6lP|WMxG|>FVF85}%0CNz8{}m? zN3T^roO9mH3#r*((AJ+#PUjp&GH7L;j3$6uggUST^AJsc8A=iClyeax#n!Vh!^kuQN>IkMm`93L=pS2PL$(wgP^i!iG-y@PHx8{T zcHaAPZi#H;R>-xu z^%ZA_{?6+Dn}WxnDrF^ZA72TfeC{!C=2NeH#RYOaj|YNt{WK|WQ%Q>{g78Y~zxedznP+UK0Lt=Bj#Rxyj8 z!F39iC94xEBo-4!mRzPZ?|USdwK@B8^$^qb{jqVA*+yDzCUP?YcIX40 zNoI@a*Y4_s&ED8<;ruCgIIn6Md!;jy@16*tygD~_Dni&wAVzqY>UElqsy#R|E>ywr z^FPoh*zX~svs6|UYG3yyid5}0Z&7(0AnMBw*KV8vLw)JyGZ$=e*p@G1=?xWDHWSf? 
zPoC&`?W@ym;P z8jY?=FjqbwbfPeHyWX~Y>oQPNPLP>+TC_Q=W*}lFb6ZC^=1l1F)7-@pvSP!>HFh)r zRkNcl7%dtu%cVKb%*+Yt#YW~7`vWB$_qq<8Phy0z*JN2;Wfdo?OS~lmHoQLB&Z!~u zFD@iZQ;kM`GW|v}ri7MJ0FJemYRCLD;}gED5_gVghq;Lji?r<1peeL9m=}cgV39o` z;+4yNwYBynR={6;Rz`x$bxNEo--~-zH)fyg; z0*U2|e>eNtUkOqlH?q5dzs>e{ed>PWwa)tz1>-u~6{CiQ9-sIA8OC2ovDYFdd6wd7 zbzu?`lyUrAEvo~r;pj%V=5Qj%?AM;XhiC)me9+cyc?OCC`rjkYwejdWQxP5g1yXN` z4xHmgdpK!Zdg-cetX1yV7L-mlb+TT*%72zRwe)&o$!^={=&J=k*`-9!HyKp#44wv7 z^Q5n&9Q3wtOOOXJH2%z;kTBoJn%~tC;iBhVD`+B`gEYUB*BF!E&U!$4?so2>)6s;} zs4x!^U&*h>b9l^8uTg>{S#pI_B#EI0B+>MH7BRHW5;Rx{)xKa;;ChN0FW*b^^pLleW?&mPozv~dl!q4kk@`a?g zEdPF35!f_$|j*AT}iAVJ4x(`iV7(n z6ZF-Y{^1+K!dOYp3|yIj&~QX%>1^->-x_t+bu>g4v(Z|#{jS^~WT1OP7jIA;+KOf+ z@NjV}c;rZEA|hL!lS094vG4>r<0B%N0PbfYtAA&%;yviRBJ`_25nPf_yo3$4Sd z``2WfU|&e&a*=Z+=~`kqRF&&5oYHmto*ONCvdNO>O7VzG^scX*A3evfZ+M(7{Dszd zELU3+507Z|v&yaIbZzIFK~TZayK%8~q^E}xI-ZOAl=#CJlXpUlThkkPrO z%S+SO!}6C~9G!`J{Z>%QbJC0a+|VV&u+k^}lTh|lE>B@T*`@;pW`5Zem`0C!%$oLn z;L$~Rs=b0%+tIZx<`8?m?;4zu>EHVMc|b+ELDuj6al+mMO_uh;C3Z>5YZks6Q9AR4 z2G)Y8$OaG1`|+-D?w^^5?Nw%;ebwmSqlIp)8>jo}zAG;NkUbG};xaO(EYM*Uyo z_eAEA&WEm=H3}Qn2^-C-V%FA?fDSz>5$fN(qxtc2;GN4@g0|HCw>wqZ^ihU0bp2_2&7(DSlFZo0sltA9f^2U;(hNDB%5;`65)Hn@R4}N)ZUm0yiRQsb)cg%y^wxrI(ezvEPaV^vavqn9T`)q4) z#15qYiooPa1C#d6qC=MMfr-hrmr62ts(VqxrQl#yP1PN1#&BQCf8 zoLN^Kbz=K9qqsP$TK_icYig+!-yie~`JW8~x)8hU9iOUVQ=Jf`fN0Cl$5T1FW2K2A z<>OGg(YAQB7nQvg*IM9eAN%Qt^-OTK3#2T9-D~4OyKP}^XIg-pnWtr5-knjzd#`R| zR+r{kM~~%?k0}%nZo>pj_v}I1HW*g+?wzn9|Cp@oDvj^R;Uw4xQy7{#JNU!p-?w|8 zK7%@ucx2T5&EP5ZY{?r<2EW;Aczn<1!FSr^0^ov@|7O{YZqX1_-SkDv&po%B0^g#74%MnHB&Y3HIb9WdeseahZGkXJ=y>1{Z39Auf!(`VRAeB^puoP@4CS7tc$Q_iyO|Q^?=t<~NyfyQU$rG|5F~EV z)b-wyJyNkq5ncRdE<(8#j7$j-0-PIs@{>FC#{5*3N zQUS_V!VBsSc@uVQ*5s_)O-&g$qf6DDL=DN6GSq|wzlD~0`L_LMFNhpGG+lMreCo0Z zqd1D>r9gglCFn4Z-0=r}9s?2$O0<{U|2>P)vRBm z#WO2vVctedw>kLPS24a@{-xx5YT06^jPc;wT9fuL0Q`?E6VGGq3>Y99Cr*U1eowj*IcAiFK4r4y7 z0+i8sTcYrv;Tn&VGzrl7E*o6;lavLpq>ttK>8v$jG#}O)>jiS&etfiHZrC@&u1@=v 
z7iMN*`hW0fAgAaX%{@50#4Cy7%f!zm_0@3vEilDleP4~2CLrEoI1}FZX1a_4@@4Bx z?kAPbY>{yFuzdOfE$Dz%9AM->LOZW!`BAVq))ziVIqla=yoRLKE2xq!OK*t}MtRk~ zp4g?C!Rx-oX4La za`z5ike)eRvpW1$r)HIZLpP2tPAS{vdA1T5<;C5Q*O1wQirojg)38F$FWDL~I;W`T zb~Ii&fspMZbw9sGC>*)VZ>O$_<)dz^XXaNHqeZkd>k>g5=*IVmU^-Z<*kC{8yJKOF zJH^0x(KKGSjXa0EywO(R!a1so_Lhy+BG1g+B${S*vXATNyM@2NR36AqF!Vk2lALbH zDB1N3PEgDVqaGS#Mqjb5{~i6tmU7kxSrXDGkzIj}rQaA1J;9b?i#v3TXsTXe&s&h% zcVlfyxpm(wjq*f!{%ki*wbEeU_)IUuq8kJ0MWpIPqWwFe*)0c$e4BWO&HP5GY^1rq z^6CfBX83L2jw@hoC1M^iDkb(QQ64-IfKPZ%9&{hwx09t)aU8~_8LlP&_+coYGML@P z$n2Dyp2T-nSKYj7oggQ5={{Y*!*jzet5{1U$}mf8eu}c}F&J4*7u{@ZdoWMQEU@%U zRG&jd7(|bnR4=qHF3JOt63Qk9-0tKw@JV#^D}@<0*FYQUxCODS+@Rz7T{&EF*3nh_ zXa~o6j|An>;WVkyh7D*m<)GU>kR*N0q|vuCNWarfYrxmj8nA@t>xp3Nmr&o><`1wo zIU~X+M?RWJtb1n_e_mbf?JYHv(aSV28Z8bh%=eLMB(+`Xw!#f!SrY+k-$K7CUL3O# zU)mbxqdMn2h&g386P%f+3_Miexl+rq8y;(#w?SSWl@Mp!uoVwhv~1V&8aee39~?&V z8w2`s*Be5O&(NQ-)meXK_*&rjNFduN_v;?7sdaH_pZdlsrQVtl_d?d~?BmCw0mlwR zAz!{3ZHp-`URolZNKY?*gywC#_Ptr>M)bNKflt}rc;zTI+BgJh{!)Fo>eiW|6&Pr? 
z)T$v8s#)=Ve$qO6%s(+pyr=E&eR9A;cEjQHuoI}AlEg$n2lY4|M!V(udl-`M&(WV# zBkQ#aertF^)m6VJ^|2;hv#*5G&Q{j( z-Z8(7h2wBGa`xfYPH`gPxS2AM;07=}HeO|en471DRTwUswWyt&M|Y$GXkb}(t>P78 z61lrLRLJn3os8G2+?deJb5Oj3GSAz&ZZmXHL@6&_zV|2T1WSNZn9&*34$bU%V9;;Av!d^{V=p8456R};V7cK0;2>?HY$3%i%p}w z%sqe-Q%}66C08HQYG(Q;xi^&pwQ~hJF-Z}x1~c+e=}$B_`_~ZFD?`J>zO!}Y{HNS4 z8T2^PHk$;xI)Sgr*tNvslVhT-d4;lilXkW=TZoZ{_wKT(0((LAy2RAk`pbUs^vV)C zCvlHrlV-OP<%II!{jD6~;|Koj?kZZcNM^!LH+hC&;6NIZ^|_fDG4rTi_?z#D7sSry z@&$T1qm3C*%-CQ$8bt=B$VTO|T`^1R#dIm_( zBOc68j>DiNLyB9)+-VZ4$sjvXmhlwj$%ynqL8N= z)>U&)qHg*_Y;9MgvSfiF(Dd2pGo7~TiR3TAcGc0|V7pgX&Z;gnGEl(V>}2kOFCjlQ zqf_+kW6KKopGP@9WMfR$6d$r6YFWEhWkw^bvc)peD}S;LJBT@HS5*xja?EOK>)k01 zcqlkGo9If_da>8+WCN;&d??F)YqRzIuGV>{cJx%oyXQ64#nxbK+(7PM2I;?B1mrcA zh8X;l%Y&=*nA^ZIQ%heZ11)Qgvgs8clk|m~p&RznFQa46EmA}eUQ_Z?| zc4nSxc@HW!7}RVG_vtB0##Qx5G3K5Gl$FouG;4SxYsO~nnCp{q_Aeiomz@I&tE$L_`QGZ*x(dA39VnWs>salb=gKB8 zN$v&x-)5mQO{y_Ef02VRgEgS}g;{QI%AZ?iL!Ry@2b7l^Jrg7BDfW;qm}}e;ovDvj z9)2=vZCJuThSKD9dzKit`2utL$`w-OB}x6`uY>iDL?Bo~Q;XM;v zKm)}ivbLvM(Y1slYwJ|kv4$Y)?1nu}ZEJ8zN~mVO)+;s1bl{cLF}?8d#s~}CY=u*0 zd5KA-o-6W;60cjojo+efn(pj=+U8{#tFY-w8xoQ4ZyA`hs^dIrt-U;jC)x`tx9U5& z{A^I>#x$cUQ+>Ih#pd$h=96Rs_g7ds_K*X6; zBbU(MOE#tRUl{5S3b84?4Do+1R#fz9r%n({>Eyh+ahwMuaz~x}t%QJWbQfEy9FEO@ zt}Y`vU{0O5%%)waE)_7Wz{!M7D8pdXO+Eij{pCIA#MZO9xHM8ZT^ghljSCrSjwBPdJ?d0_CU=`b7 zzpeD-MHMFJXg%KgOLYXLVxre%dAnxl@*kAOl(pHOrF)=UV56a|$#sMo`QCL-T}Ezl zxuyqaSg>;xG--8nvDeSnPfa~OP*^yfM~cdo*5Ardg3%Nv-Ez{!k&*)acf~7y_EZ(0 z<2Q;Sl`le`m~FFn&vt9-R(C^R@?cs2$Viv|c6ok(sH-xbKAaOKtT#?{n?_dNzL{K& zV=VrV9fE<)4wZLtg$Q3@cYplw9I|x`7K{N|I0Y+NR0YR9X>%jRJ#m9>OXuvQU$544 zIae*!bB$U%+h581QPVU5%0{4Y0GW;D5b6!Ma}ZS5JoX&>%BkaQYGxwOMZq-pWZt~7 zjkznnT_&wziY@K&>IOqy9Aqzz5+p|X2ox>HB2cKmjO0Rs7&=9>c7EFF#$zyMOd&+G zK)ZEqubu8IQFDuJc3f$bfl-LzJ0zAymy7W;u|vih+mp!n7Ywi0*P>&cA`^pbj5r&T z5`!GRove-^+K5E<6e}d>e&i5nt9eL?Y>-VmVYy(q?KNbR#YwQRm&MYegND9=qF1@V zL-)n;ZmGP}h~L61s>_nP6!UiP^FT*t-YDA;f8k7UZ3wzI!)%KX+>o=1FSJHCtY&#h 
z6?)cS`TXNvMEOmjgaQ1u+)&vNOj%{l8Q0RU{>{`2E)|9LjRF5t(iuK1;qTE4E!DQA zt4-qDp#X`O-={&N5xDS9;l04a1KXkoMuvQgXGQJ_dE>>zgiPG%D7$P@cPmreU$ z;kD9p2*jC8EQ3~8SJxy7cf0VdO$rPT8Y+Sde`xkCq^ezLDjqJ4_AmhlFp093ewSw4 z%a;k8>w4z;I_Hhczzy~$3+kM}3+nJH^bYv_Jjwfop{Qy%j10pS5;SvLRx1jasG$v} z=~U?qwR7|%#A>hZn3$N@8eYe6A2243>HY4&9xmL{ zugK3-0pH1qWgb^O{@C-re6=1~ie*WJQgX z4YlHoa?K9(%#fK`m2c^cu8LsUNB1P*>!j=bc)=f`+ESVtapYqiHJk|(vpK{>Ym{Gq#E$tcRFT3uJr37z z!8)|xm~WZ+&A1MFtFElb$lDq74@xTOEx`Z?{#yeKAO~xVNx!~12&(xgDY6N;QN{l* zST%4f`1j9bRQwCC>0*c9V~yJN9KnY6Y5Vq#fK!k)>WTpfQO)R;`7O0$^!;Dd6Kj}< zzf$DjLoNX_i9bCuU?pMF=c(1*4Lo*FYPEK5p;(K^UlY$I|*V`BhsN(}$! zRTx||H<4RPcjgSM*J6*rq0V({dkx!XAYcR!YXm?ud?ZC%8?Fc|v}+JOWUm zlf>jS{LZqr`{YkVAG)MbQSQ(S9REOfwYrwoeoPK>E1Piz&+TXQ zYX9^UrK6+JdZ%BFiMYhBLx5<2O-!A+=iUb(i4`?#w3X`3DNnqZG7+Z31P2COKmPyh zLrZ)Et^bbvYfJ}oA^wvl(H(*SVCC9K0od6gQb2Ll7Gn$7-#=;s!@7GEq0ovYB_3|n zV)0%69>f@D_^p12yCWgf6B)3A3o?C!+LgGm8LMOMkvK!zLS9+X)kbb;Xhx*-ukd;{*`1#MP}DNxA!M= zb4~xq$w>?Q=bz_KS@#WJe77H!8N6#B`R3_iVNI^Jl=$L=#&2A=V_D-99x0j#@ExVr z=?xD?P2Iy#9Jl|Td$-y>aw9_`K=WnXN?csqKJ##eB=4b>M8iwXGBx|OK9{622@hEu z>N`8v7PPfVRV8nK_Sv`KmYQP0z(l{xZ30KQId~LZQmRP-lReG>ZvPj1?;X}u_Wp}z zW*l{fQA9+f2o5NWfFPpu=15UQ5u{fIAxKAh4bE8Th=_nxL8KEQ^r{F1=|~M#X`#me zA(WJ}vUmKQ@AsU0?!Euq=efV{GkKoL7?QpBTJL(-yWaYFmxrQHPVIC@pN!{|I^R2` za^7vlIL~v%c%uiZv3tic1xiR6tyv;0H84SbQ$+;u#>qcnN}bNi90vL*0+H+v8nd=W z@c;vhMg?!>j*b8vjH5oYtgskpJWCF)t~xrdu6>T+h(LsNjcifs(_I3B_dXG5clJJI z4u5u%ezTwP)ypyuq|tD9>f56)lY!Qo^6DsD2eSN}k%x* zKyL)*rdh1C#=P;S=~5oMogOr0;W&8eW8qz|Y-Z-cocUL|9U-!tFfZ~barbbVHs3Ur zM%P!4FbIh#eZ$Z|5xs2I?D)hPa6@T9JDrNPU1$L)|Mlh4SB8&&+qcg#??z{GBrc9g zDf|5}r97V{20;|5u1)Mau_449Zm%Nz121eQk}%8|e|29Y)qK?{&t1rEl727gnK)N`{ z(@TIURTXiS=fM8my#xED$-haG`-BFdhw{O*IF(TJC=4!%lKiQ3VTSP?LQfc;!-KG> zH-dVd^h$y6<7v}@$1`wBpk?Gv4MnH6X05TYKy)vjUD(d(zqSOtnkEnfJs3f&auG^!_$}Boee&0l)$N z+0Y%AnRiS+mV7$_rPPGgN;YKdAA~-0u+nFxlldUmQ4G477ppa90DV2nT#t&=b~3Gz zty&(ujMYV~?}0w#z?<7(v3cRuYR8^S5TRqQVS8a{iIbnB9;APS_{#_R-~GC|=z;G} zSMe!cC`a^~DDSOo^r-AP?QN;-#I24o_5ePopTkKaooe4VYoH 
zrvgu)dX8<3=>(GxjOiS&fBBX_ppUVIGt!DJ%m5tX>gVkE`0EMbjD4*d)6*e^aVpN--vYQ2q1YK2&hjGyb}?7$MO>S&xvYspa?2t*xc#xK7mg zZ44EG0+*5P*|l=v>m)UBia@+U-ED2HN5W`4+h0*|N7yXvpNHfG(=x<6LfQlVO0XSP$VmW%uF?a zHo1ISJgqU>S)J0W@Bl=Q?3jFlEDGVU;7_0n2ik#Mjf4* zrr9^>MWf{6f@!c8$K%MGnwhUi<52&}J1-^-5o|=C=fdurgM?BiB zFPH=Z?v6Kln*$JZHCkH_pY&fTx$;z-`zJ*BQ9t77=u`gUBGRVf_8lFDFD3Y*tnp?I z7)HoVNW5kIY~D!Ip?gyOcZLOzm;>knFRx3={wf}J_ZK^p1_#NLW)@v^O7`b|odm5b zSHSoVIV=4kW@X@%$sMMeUEVSk{wbQcJUjdKV_Y23QQJ>!R~~WkaCfuTUXpI86J zDfP(u3cjTUhihq}_*u_J#9+W+9OSn<<)M9GYqYpMcy$ShXH&_D3aPnyw+-Kk+1K zw%Pz#RXlz^96F79yJ1FoU0i%p!SwFl{6R^iofe4iBwl%mj0P%&-}Fx*-tj7T53J{U z#gP-&jW-!>LqfcT7)NHv>E`)-No>jP#nO=!`HH`f$-<1fstFyNzl7yQMMPjQ5fOtw zVH01TcH4)j5T|iQP+}_2DuN01+Czu()tl08g$?Rn`=a~%K~zY|0z4wb&_G0q>Y1lc zwL1AbA1JZj{?-gMR)$_NngsFD(fC8W1u75jf`sVDAenGy2Ndob9YT}=cw>uNfB$K# zlf=C)CB_9AmdwK<zR5~gU)c-TrI^SvcX&rLM$UNM;(?a9EHac^ z(5;Zx7RucPYQq>W(^zQMrk(J1NW{mouw1{+!lI6bEt(8zh?e$}FuTwB-RpEQ2Rq#G z3kWHKSJ`3s&Tl9f9wWm^KcsE;0tsHN*)z~uz8sjMw+T|Iete*(eM)^K#>4+brI8vq{lu76)b)iqQIBj6Jp%cp^Q@qLhDC zN2hVUebY10S4GZm``hg7(3MBVccsVk_}KN2-o2c?A{6~c_mzEb%Ww8ZBKf#SD_!mF zJ$}*M$VQQ7c5G5{Mre{>z7_Qd5<2fMf|6CV1fK5We+1SnHyikdB+4gTVDnYV9c zg+f_zA}w1|#U?&YG}Ig?D$9A=-<$1yU3B1aRu(&ZRu*yZs=lKIa$kaacGTjO$zaPX zjHYhGx^KEsO-^Z&6R(k?S)odBRIONMs{(rO>(g>`TQK9R51BL8Uj9Vi?otmZ504p% z;iKYuqWR9T9-h9k8y5EQsjyIf&$V#YJ)6;6ib!cL6Jt=pZ0x(3t+$w+Fg-)>Dw6Jb zl+zVv6SHxPM#1Ij;=T#2wK3XY&a`a zvcBRM8Yv`{ZYwo+Rj2g+msB*0#FsnFm5VmsSXpsIcXb=e7vPMAt*chjU-npZ@VQFk z&h8o$g89aOHc55kY?ehZq!9~B48rjU1#v{@RgJAclzJkPO?{f$0x7xn@HaiElOo$~x zd7GR4*?MWnh&QK#qwt;G91%ypw#DMp6ds&TlS=)Pr|1?Y%JJz>T2cf~PJ3=xyfiZL z(~CdHXO@sHzs@KfAv|^Rz8%zU;(gq-?!fd_Oo{&1Zz=Mm>4==09A2c3m@K;ET zkWPaSr>(K~N(D-5g^6j2LFvuh)56<^G|tB>Yz8VuU~zlH8w?p9$Rz{)xOH$}_jnpZ@Ghet%jFq+=4V$qmJC3VRFP?0Vf$3xuKhj9CZ_5QA$ zl=oRHs*?2e&mwFkOrx8Du*l)i*+3)Vi-a3mD)i=z4T<0BrOF+AGyE?!nl z?UdPl5wp`b0KVFWhliQUmSU;YuyUQ^?lrB5jr^pM{%L7+rn}wcHH~BxI31?(_Gvi{eGIJH zvkQ=j_k0!~UpZB*xvdxUO84}SW=Yxl*GRL|3w`O-!pXZ+U%q6;OrGm$PkY*CvJ`KA 
zZ!S#E`b^Z#26_Averj5}DdDQ|+U#Z0Z^`-S)5YpVabpe!Y2WPBxk*a}5)Io+1*Fs~ zFN0_Yd1uJ+O{Md0oG+FE$Gz1x^e0Te4_e<}dq(J}`te(5-SRH}o+r1YpqUTt`!(sPv)!uM z2kk`7sagVoVY9_UXMOx;uibr_AYo~4>C)V%-p0W`Bck+H&J5Se6+P@QUZ9T`&wW!* z_-o~2yH?D7wFe0(0h@X|?w)Ns)yR_BSGKtK!wEhn1`wo-A}c~m;%sk^XWrh0ZqwsE&PYd4>7 zYtYBB`{LX+B>}now8+`7OGYx%(iim1y9W>YN#BV&XCCn?Z6t8*ucc9fc&LXlRlC)q zH#9^nO^z-^=nA8AzwTcv%&dq6zJUaaY`zGze-@ zNrOv-f}|up0HK81mlW+?kEwwcw|FxrR(jsg$Jt`5*lD6V4Xm)|_r1}j{=1*uF%n5> z{i>CfR7|$N{5z7|<063`v4Xj|2+o8A88W3fn2$=W^^sAoTv{3}$uhay-qiH@^F(R# z{2QmUZk)4yH+?QUait(%-O0M$;Y(xTv_w&x9)J1-&O2ejw*aU@yu0h1`0?6bwhhac zQ{#oPivFJdy9|&_;5%uHZJj8Re0X6}MWS9`q<&Km(=7l7IPercPe+!P1`7%bcK92s zlR$w@^n6dm(xTtD54H8y!cNyGhARWe6Qwbfq?>X^Dly)#o^Ti`PD%~(<5FIn`+4mz z#nDCaw)DScgzF-FjG$BKbKY%<2Hz!pO2c5~->w7&(VpY0?)z!e5{G%WBN|5<=|n7< z5R(*LPyp&xIvUsVBUCF=*KUCk$zz$`awp3)${z(o$IhaT-wtYLi;6?p8NWHYQ>o)c z+OQ{;<(kvpyk-BB%jYuRst>1m^#jsUyr>@^E__TiCxaavF@;?0ZW|QGa}8v>#-Fg{ zk?jOCN@0wZU@mDA$lDBdSl$ZG;`mC;X>DhFUS#bqrG&ZrD6p1iNA0BJ1_|1J&$G6! z<3!v!J>N2fTf>;eDY3Een^l-;EAx^=o=%Du%Tj7bxTfMd8l|ubZm_~P6YzjW_hML1 zrjMD*O3_$k>(i<71}$3NGp2zwSZHsgC;N*Q`3T%JFwl$>y-LO4ffSylZW4J)`Ah(d} z^?a{V)2tlnms&b179w8V(&a^>#rM;$-7F5yF#)FXU|lQPgVU4S=dy?Ad@2V!?~L)= z94kLtfBCoPq^HXT_$IzHMQp6m@gNZPMr24&iTE=J`QC0;hiB%>=E%FM1%umln@HOq zRBOZO<)X#^A4i zo{ou&d&;VF(QOtlg*`&Tf}aAp^@$5~!hN0%8?V=4r?Eww!&cR;@# zhEn9$z0}l%!?3V)GeAX({)F!5kmn2-2#Wpll zY-MHEvC)>62Hc^x+)3bZ>!;tPvU^pt$lp#|PNN4%%#H4Khm%{2EkN_?Vhg6B9~IoP z-EcC=Ce37`qk~gIT!J%*A@RPf!KJ0u>DL~baS>iaL{>D^!MAGUsAkhWtWttQ?c zFG{Efh!JU;pyqZmRpQJq#lgsSDRO~Frnt{#WV_1kcN`yUkCl;A z)S@23lFuteBSp_{FCQiscBjVEg}v&V>C<{=`7F%#zJ2U&>t`&i7tA?7z+3eskDigg zcBg$xk{@j+DQUN1_w|(+CGUk>B&=HCt)RzUY&_mE(KodtqrID)sv$XHcAhP7dH$_$ z&2FlYUZUddrr@nV)^4ZsPYeI)Xh8%raUkKMh%^b2I#JefGh(<;Qz{2aCiPmSk`i!x6X=H+ z9uDry5RZNeV{L?WXuziHS@4m@Q=dPQQL*dTf~BiF_sS{kjXGj5I4Ml^xkSf7it+C~ ze$h4(@oE0~`lGv|fYN!p`^oC+7Q`Bchr;K2z;cI0Z<3I3y6?1$%3Yf@tBv88hsMpw z;Xd7|Uh|!wKPO3bS)r=e)+}mkY9lNe%Hen_B(IMiO=_Z1V^kVYD3Xai+YgcVooTZfb2;7@y>FG;0Lkz-?01VNL}XU)SK)ZOZmc@Y+jc< 
zyk}rTShrtb;^$PM9*ZMVJJYGS6oYvB_J&dRZ5eLV6NwnkiP1iyTh|@ zSmg7gZQs4`RuX+%PBb%+A~`ul&m3g)yI0xa)GQas^k{Ae1>TvW zEJz)F*mi2&Nw7iAU8q3Qf;efv$>m1Eg#TL9UIh7sCqt3Z1-wRedS6BsMERx$F-f|l zMHgJ-U?{1Ir4+bb4(8mf-=5C99X}Z`XY_{6F|_tn@boOM-OnSu=54AQvZayCT}iuj zKh3j*Zo@dM2DBLbrfL?(TDO5Z&Ch>c#{aXWBC5`?)0To!*^{8p_0Yt6D)M8+^kc%q zpWiqlasv@QQ$&c&Y>T%ZTiXsQJe?UgpZ+v4=0szwzx94({iG3_s($(cj&i77;p-CK z$}bClaNgwFo10M`w(I+%-CLGxpKNz?^|_7Tj{Rnsp3dn&4Z9`kbJ;Lw;^+9uhb2!| z264N~b~y^zlWd}S$}?^r>*m<1oL6y&E0T2Fub8Wv`VyLi4k zP&+RF;=>Ew&S}m{9W8Q-u$@x+RXyz&9W%BcunP;#YI7!$u`JDKwmY_Qpk2h&qG3jh zYm?;#wF-2t^;#*PEr#P>G+Pe0mpN2$dy&fDbDWU5UHwcYr@+%|TFg0!ySO%E*YEJE zC+8zY!pEX<7IzeANd(mU7OpmSE+f4~MeOj5;xM;ZEBCY_vIozwNe>?$+Etbq4)-x6 zq2!tsUQ-mt<|ltQaSxt0x1}rbPEEbqQBiSHFPa*|Y^|N-9j_%v$sFgL^38&RxVbsb zJyUy&75K_@${K4$I*5;V2|2%D#K4`iE&5w6YJ;~wTGWPHs%JH0@v(0~-{8o|s7nHU zj&HZ!l$2A7k7YIMVr#{J3|+^n-C=8ddEgMPEzceMT*bMw_zmT6hC`v9N>jjgMk z$6vlYeq1EGhSYwF`?W!FaGI!@6>*|mvGyABO!94NICf8}0b`(*IvtcYq3q?iGu<*b zx9vZE*1ZVx3&C(bp-_Xe;=!r^x!C7&p-QoD_QmsSkH3BeL7b<1yWWJgQq77}=Q^y;1RTSWEfvkStXN1j zzeYhS=O4qL?$0n4_*HbEH!uM7riYeFc`jm`^Q4e3Uv{T_3SLM%#IlYGnGo3!BXmal zZ}xrofWr-2)?&`A%}cjN2}W(0ps(3P_4662DENufr^3UENE2@^rN-;)QN!^`DyGGX z?bptIooJT}!&b4Fx}S+x`^y{l^Bgf!)WfWhDOH>f6Ctu`eh=2egoT8cIjjv!t2cPL zD^~*ymnThdxsm~5?l&;(ZjMx!r1&U7Gopep5Dzb;%KHX1fZ|zHwmhIQdZp9Yl;PHm zj~6v6wyb|}red{w{~z%edJ{bdDXfb!GUX-aJLcJ1!1-{(DsT8n+FARSf*!+Wju}Ndl%##v zq3KgKpz>^u?KDHyN4vtIZt!pL3PHn3H}Ho?|DS&l86Bze^RSt5iK+Hlc7fthAe#X8 zPw1Du0||Cpb#-v)uy2`hhJzTT!H0)|oViMDk0(#o zJ~*TE?nK;4G66*)QuknAI{}I~($<;U4oA>9G|Ol5&}rBEx;wqmmq^)x<1@HjjQW=o zz$YgM1;~Ng1Y6Kw#I1j|VOSIv7IP?5v@C=^k2nZvk{MUd!KQgV$O-_jFRaYi9rOwG zIZ%hRAo$a1W-bc!>7Ngb-d_J<)VXH{C8wO?kZ~EW^P6dHZ&x4`79L|HqGVt|Sx6&o z`2hbhh*#h=r!VAU^J;%mdwgGCSXf^lGrtd^iZA_tmnHIFDI@1rhKi;r^Fu|MM!?Qt zX1>W^^&wwj1y&K`i6Q=@B{K?BxRR^|1Hgf|bRY(XIrG%-|K|_PI2~zVY;0m;tk#F{ z<;!IK9Lm2(i9GJnqclYib|sJ-tv0oOuy>}fOVG)5T1R8x77m#nv}Hol45sg~s;uA- zBcd)s*^ak185V4Aj_V`JnMFhC0F!8N5ZrxD*8wb(wjKLmpq=FDyL#DM7rb 
z(7Z;U8Y|;!*%-WG2TOyHAQ?%8M9{TDOZLe{vib$YaZkjUY65=fg zCv`h);Zc*rWIW0eQe{9$<{sQ&hw&>=iS&O~rT+Qmf1wUjw?tfQhpT52B+$i{4ZMw( zHD#p_7WJ~RtiNrNS$|u>^-nb-d)}*BO}&5L*7ogNTU&Rkg@uBTG_tFto0*us-42(* zSk!{du2s`O+dxY1Ya(%SGA(U#lG4MAN2IOE}gjgf%*V;Qt_iH4QA z-@9jTfA8KhmhWb|rLk<#?)sdlk+@Z{>v?VldiXGTq(pCVq{K8`I3Q~JHz(WFlKdS2 zSuu}a_IktMH_zz*%};ehWs6yrWXAFEi&PJ<(r%~JoSe-*hugQ=*>B$-%sH=dWM|Zo zg#}%IyoW1qoH&EQU}H32`b;-pmq?Xm`(RbH9BC4n+`+XGAto`DqVyA+{nnPZ4|&T@ z_(Ch!-XPl0Fql*z`x6d5ygWis#P9BwI})9u6n-)zk{ruQ^LF=T7Ah&kzGHYsjV z^(f(+ot-JNCDrG`u8_noJja^mu(lrm%(Nl4h6e{_rpzs(F1fnw&4eBnbcno4fS7b zLSIsw=uD@PeBlOLMA8SpKO7pSE-7iq12fZLG@=YyS#lXsD3>(@q$N+~{Q%k=2 z7etIzSRqtp$yMf(6sXg1-52=n%eQYI{*I?@ZZ1k8X(M8vG-P2k0f9Ux^E(O1qhHGb z-rjKWrRftnZ-s=`D$aQY$+Q%UE2Lkjedz092`bz{DnS$!&k1?cA~X<7H1O~I@b{G~ z@mU}J^BOLD6uRACr0lfzutj!d2qML#-_-{Cd6CxVG!#8%{~q)hl*LQ-Vfr**)tCB} zUT56xjwsBer_bq#B_cPb{qy+7s7pqoqK&%-9)_QPx&70IT7l$8fJhqZbLZZR9hnYB zgbPsthhbGSt)=ghK$LO}%{C@!q!*MtKd*h{Hx=sQmoGXf(-oq$>1^^K+O4pKh3Yujv5r{HNKsT`?;Le0cb zVWE>Dr>TD}ZRZ7>>=4T3LV9;9hXqa18JDjvQaXRN>hRzp;0wFkZ@U7?aR9G)xr;8S9> z+=UB@iWl@Y0lW*C?adOV5AQvHjXNkmXpK^g3H)*NUdeT79Bh}2ZA?{C*Y48w$qCsxQ)T@vmP4&ULj&mn}abIx0>-CcY zAl*ALaUgLb+~2{$+S(z1VFqS5C^;K(N-kh)leCJ9!^gs>uKA|_+1Bmt*H>m{2Kha9 zHTd+ z_kFh@2tDy8Twn$myn=^DLK_*OsLPo~TCs=uc=;O4BAHEP<55bPt!?~1Yb#5m)gItP zU2%!~_h;rRjvc~2893YNq6o9ZPX{zKS$RYCi$_SF(O2&j^lV2K+@>kZ`MCJ)++6dM^@K@zRlnK6?=K9aYn=kmubgw+#Omr9ENEm#p0ct+(W%zM{rA@&y&lx}rGVW%^2%8UrPQ-JKL$X=`rNugAVYDa z%H45I#yt1OlbFCje~dpOSaoQC}KoQixnkP?~5;Gdu4?qy!WwThU2zIy90MmLQlDQT@D2j&*VN38AsEfkxX?M^Cdx#qi6&kTC9~!{wMv zT9qX_eKEeTY~>_xKJ`nqB*(h23a*hZtP(bb)tblaq`pc=4;Rc@d8nn3{3p7KP79rq z7C$|aqKG-M(doljZ$$Wd7gU!RRJ&k%aV&v6u{)VS4kGpy4N54Bz?I8RmYp{uqh2Sx z>`u2b6+vn53zamG{Hub}gA$%l@R;9Rbka&h2HztgJA#$|{FE4yI3X?kjKy&TE-cZy#eo3*A5_+UY^fqvsI;9B&;A`PmAesn00v0bj>{i_wjHYrFm)h79qKNbdiiK{^CU^1_9S7cU%bY%Od^)H)Gsttgay zZhop)&Tj9&%&3Y*lpTfXd<)aKS4-bfp|26d;R9(`5RPi87X9NS|8L}%_{X98 z&tidn{olut_8-l#Ea@X5Kqo|8Q@gDHA3f23o7{i-Mx|#)%gf6hhbK{}AS#udOkYY5 
zd@$20oIdL}n|)nWG(mijJc!yGo1$%10{CC>->NIB_OlJ2Z$`oL!vp&v5RF%;V8%%m zQUoj3s@KDFiSHlw_5E}Jq_SL6FLHx4A1Ip-WIDIO=ncKk;S?>PX#j88n}zZ>*_D+! zIh9QB7&C6}1Q&!OOpW&iD)@MNt&W>)n(DbvH+Oc1u|x0+{{l+$F7cMEva+2*~9p@w%2qXZ^C7jJe)cH@Spmr5bYehI`#>rSmSI4LF`apKK0*s)b znj9LHXmcCD#(Y+cm3jqtj%PYS5YA@=8b>k!8q38i!aezrapDadV<8nsZ~q3o9l|+p z0TFCk0^1V}z|bK&`G1GE2`_Vp?1+qo@&T*PH)X#-;2&k8@QGhmik%;{NA+IHiQlf<&QjGYsK}EyzmNN>b%Qi)3veb7cfh(?NKqgmT|bdve0# z%>Na_&F5AK;U@4*>2(-*=D+5Zm3^E|!!3zshhvOEm1$FFWVO8QDfz*(L^NzC_N;3h zw5%85R3F4Pb*0rBdFHLO3N!+yy92SUp%$fnLpw4lR#K&OL5e{=#4#q9P%Pg>l}LJe zSV}(EVj|y&!L$8#Nm!@5Ger@XG8~>*Iui8`vY-c;rac8bfHxn8x}_2ni3KUw}kAAUVr9Hh15rll@m&KwISm>_#$__*0hFqe7pK z=uwgI=Kz*TZlk+h3lgds6N&#VQoX*>akc5a^4%A$D2uH} zj=BgWuXUZ2DLqtwm0Qf|g#4S!)my1fzj@yyIqmbt_O*Xliq4Fh_~U$Q#BpZ{jo1LE z>G;gDeWvQAw%>#zZ(Q_Tusg-g^?b!TV#~bgahRu>; zOJ6^-c_;=4x_#RwrlsN235$6(*${D51x=nYWSl+EFu>sLBQ5Jj0aw<}6n#$^z%#55 zH!Ax*@Y@050n6eVFZc=@sF6A1t=O#R)5Rbjx;AQ{<2chlJj}0bZe?Xjo8eDLAWYT+*olcy zsd;L_wo9GH^-Kd&B`BZ;uiS8GX+2Yoo2wh?IV!7!m|;Op%OFx)?3Jg%#E2_T1#TJF zJw3=49ho94dEs-eVatVnqF0$nL-eWnm6fKZ6(=#>lZ1h_-+zqO{oo!*NoeHdR|%%z zRaAoOmT1s{gOJAl#lS0hPv`dzT+EJAy<1{9q!wAB9=RT;bDUFU=k#Ckeq);7h@DAc zVN%HH@=L4G*@oa&hfb<(S0$w$rQ!bDUZUY<+msC@C8J`>R;5w#i@Y{j1>oYxU>M!J z!K>F2!u<}Z@T5_;w|6#hwuKvN@7=AZeHxP!b87~nAH9!*u!|ASPJtAoMs9uy%v|<$ zDX9eUa}xALosJHJO6X)l{2>6ztFjUrEuB`@sAAdSgqrVk4YqN2fbMfO86sWp*6yB_ zf1Y_6acv@iyfztiCcnT1(#sIvCYZx9YH+Zr3iPv^U!Ti2gR~7qpbWH)GFC~X`fY0~ ztDV&>@0q0~d^)<=aKamUz!z6|pcW3_*@Pj`0Uf6xd~QU|!0ztf2bPx&Dk1d&v26zN zsOr;^az}i(MQyvQ>t*Ipp?+3~-rqsntnF_!>iQUAl$6EqEc7cBqWj(-zV~QA00=-ygA5UbEZ6Sx_)n z`JE?n@H=>(l2cy$L>{ z)CSjdbq8;MU0F$zVrNHoH=hiG*)W>1K39{N>{5ES@PdG~LGWH!*c8>p+FGz->)Y2| zfA?EihL0YG3tF>{GFjzd=&?%S=;@}%L?R34tCQF;oyc#uEXM>+5A<)0d+?n5)JiK= zl9nV&&@V``eT|QeWf#`aSaL;C@qehj6Ost#)I3IgSGL)wdHs5F(FyK=Xpc(V3ESnp zH$mtzS&0Pku1@osiwd>x&MPGQymct3)EJP6K4Uo(shJ@%${+MRcy8E9<=9lCUn%w3 zA(mEZFx=i_?jt%Xo>ry*))hdyjIB?rx=E*SV>tpMwy=(o`tBr`z_oOGU6Nx44?Jxq 
z>uovA9P<-6U4m29OX9v`#{MCzKSx;TW_mZD`o7DVskPqJ1@Z*_vwMWX$K_kC05zd2Fd0*<}_P&3I9j5 z((Y?-I&Rbzx2>=Ci=b!nYW=o+w34OZacN~*hgEcp%+_z-crTvUxplHOo8J4@!O>om7Xizw5aFP>LkrtJ7k%Ap!? z26AJL=I!i8_X7><*jFRdZ~0wfiD+T7#l9?hY*M@W2Uzcr7eS3nko>aLr=PO0|8t6@90dLYeE>*qE7V+#THZ-Kur>6xN~b4h!qp30^oGy76G4S0dPb z;nt({(hsQ%4|!xj_BD@;weEmyjc=5Xb+YVM-6@L{T!NLd|K?mm$5YRO_)#w&qx&g& zRpE>t;y-3RcDHO7d&~QK^-6W|Nu|sKpXGeq2Xt}bYk8x$Y_6Xl5;xHf35qa6#V2JRB1@fh>DdVI)(FAJK`tT9Q8B@`J(*M3haY?7eTp z)+wx+C2)uB_q3wN=LBdE2$R62T_1oOcA{8g_9E@CHirm=vZw2rZ4_z)ZdR=*H=GNy zmVRGZPSv!UTfLh#IcXSuhQK@b_;dRvQkwVWp*6L=4x^k1H)G=9^C&%a6{n)79h=vL zEh_t-*WI$lCH_2Znk5+#;NWTt+b85JX`LT`!|t^JK!sX~mBdBrt4=5(+Y-h}oN@X0V1 zYJ5$MZpxby?yFUiX^a_r$r+l*VStu)xCXl2#WiWn37#}#K^+@D9Wmpep;ov((vz~& z<;s0>Wto6l%oa|~a#B${R;c?Sukb=_q5ZWi5!re2wLObUwvu+1OBz94cIG7}IA8l! zA2TkuNQYRznY&3xmh4*GTbg?PI^3|*qyd+eH|~q;b4f`Z2^d22B`exm0XFZp@J-O4 znjS0x%R}S4_yUq9N07;Pd|<()2x!gFYA#p^z{Ute2wEu&p1yNSSeUm`f-@e7>k>q z;BRz$%@q_sOarYf=L(Em3?I?Mmk&w{Qj8h|W`-Piy(g>6VBv}L@ecqo=~ZWqJ;xSV zL%Pn#ZrNV}BV^P&71S_HX4O3tE7Z1IP`&Q;{hDfLc@W{~4uZ;`th$3|=WIps^T&_R z&zG`wUZ9Kc+Y?G}zUfp>!|;pUprb-PQkf_{DV)zz#jDam7| z2p7lj0^%jZiXW|;y*DZ%ZBaAkWy<4m-CjI*8|vQOF~43!NDuum7iGleR_8uLZftbV z(oH|1bc^?0glMXWT(vk$Iw*H*cecovUc7+i;Y*cDzv zQrs)*S>4ByKOQnLC@Qd${x-1t&AjLW3$Nxx=}=jY>-kA3o`A>_yzjZgr=E08 zVkV@;)%7bcU9wKjk^0wD?A?7fhm`cjK)>LIrHZn22SIYjP=Y@er1%{w|! 
z6q}sk1(9M(0>|%-hC?pD8CX=h7aboCb@uM8iJY8FRR{>!XyCzn-e1g$dHHhM+;#PK z`#`(%R+2xV9vJBL)KMjTtZ^wOl=gQ?`i7l}l7zsO@9=4#h}6X9zvYG$zNr65E8`qaatWlY}; zT@uJM4_GtJQ_N35&G>EcT}*)B7RZkzfJ7sWf!C4eZ39onlDjm<<-r)(v5IiRt2kkh zx)CRwEHl(%!HZmlm6LY*7;{UUgynnL)n9)oywI0BB<4+7)uHsoq-}=THtDRofWcY% z>iHG~HW%sU*SmD2kdG}@#^><@oWC7>L4g@daC}Tzh0u>Dved+`1Tv4s!20r2PRN++ z*cjHs%4Zk2l)hoo5B)}2K|Vc9o#uRcSlzj)%8uJh&0WHEHF%LJr6YcZ;_o$o?%3aP zFS_t5CouGU=AVP&E|X4dKZvo1YyE!FT-lz3h|YC44yISSqKGZ4W40AeLHpkEO`I2R zsZIks6<{Y@x+phaO0Qp^Sqk5FGG8TfOAZaC5WhPYRTTZYghAyy*PR*@TZi~d zhDqT*6L&CutwUw``O8~dtH2$@IqxXp=ty2GDstYk0=5P*MkgFmVIy0})0!Q++45t1 z$-NCb@)6yp(1$}$F3;3qgSZW(lG*&%zE23!A61S1xQ!Im)~2lau(LDlB4l-sh+1=w zFK*h!IIT9G*kxD~K<+_JQvMpSeOcmyt$8v5w*D;wWrs)v9dx0-o^N#nq8(?}CRkPK z0(`u;KC&W_f=^Cdzp7c@qH%6&oVo8Fj)i_x>(Eq6p_C zRvx&GKADY5mGL&0hb_Lqy)AtDR0DN$F)XY$))F65BhOlQK1H>9|^oMVXPI~bmAFT_#;JsWA9CpJf z>I!CCUf}PpP%-S$&%pcY-Kxv@>3uyo!$DL{sk%%2$@O;mfV)H9g&Md>TR(+>v)(g$ zp#@4JY&3&$@B@^`=~Q-vYIj> zHRJO=$an+fZ)mcT66w*kOfuF%(REOxQ&wKrlM8N(mN*yvYLgUVGw`uscUu};r603U z4;uk4v$q8ss-JBP7NvOn47MqaBA@Byd< z4~C2Rsk!~4JMl5Dll6Oem4xNWV@}Ro#0zBCon_&ZsY>E4T)65D0zDYt{aqqZ!`z3} zRL|F*_Dsvpj!`x>U3voROR>y{h3323Iy9ObsTKjdjkd$sZJ^@*F=zYFO`}F?h>igl z?boJv#d6H-I%u>6%hAA=uJwm)Tj2=rt*RIG7i{w841@i}`ptO{f%XUh#pmL@C%ky8 zS0O(^Mg*@iWvSpPE2nF}Wnb=1J!L+h*|q{SpPZgofZ6NL*tfTe-gQ2&V(T9twVt+J z8}E%KyOJZX&k=|kfdA$su-J{JEVBY%CY9dke7~*2)ihqGGog; zeR4CEn_KJmh9M`0;Gc>6@rrJJq4v3t-M54`4I4&nH(vZQImmuB>XuQG@x5gmWO-(V zm6)aZeD`XadtRr~q_L#oiO0xUK+?_7rvRAUsGSiez#%zs@0|$XcBgTg*1d(Yag$6Z`9}cAtiBpf*>T6 zsRRB11VDN4W&FEx+g^pee^tXhQ*E&O52Wi07Uh4_3LUv6T{r|Q2L|OixXi=mL}@lk zyV>({OjKkYv9;|gQQE+w#P+ZFD0v^<@?zNquGg*k0wLwUPptd6$V6Tzy7rrx2QYZY zeq;C^l9%Brc&5yhBK^bU>4=~pK|$f0v)C4a=G%kQ6$#??bXxXxqL*mG9hV^E9BmbB zNen|{o0%zcGLskRG^*oW9Up(n-`!Q-;H>*mf>7Hf>tp*@T1$~bn$uMujE@UgKMxNU z76bEwa#%~77;RjQgcwf`>bivJ&2Gh_NCiYP#(=V9?TD!M&*`?GT^gphNFq`5jCwC96jk}yk zn0|GC?&y*afwt2#MKWn||LOwigr{sFO&ejc3d>uN$*SA(?$YJV(6$t1B}w;P&pHpv z_Ln;x*UzWRZ6r^n$g#8RNfT~o8bvItX$0PI6K*z;S3xa)rUXuM06lL=e+sm*x1#TM 
z$!s)?0fYy(_tPzTX8k%AqVe8#`DD$`QT zXKaPf9=j}EJW}Tn$|G4D7H0S4whi6S#?GwcK6cu_jpaf4{J5>HsL8y6TcpQu-DIenS=pPa(P=xck5$K3 zT{SMASNO|zDyE>!_Y+}Fk)hF_x-^#W0{)Na74y@L39VK;e`ZO-xP>;n+98at{O^xu+nns##8`hn>d4wDR9v!2-;Zr$RE71l7;3gR^H8D zssOWpvpnB5HaR`~L@f?;X|D`mT$zbh+$eL^{F}6r@X6il90)@$}@NAwaNmQ+b-HdF_Jf&-Rsd} zn8moRE)k)Qu9>hS3rCX~7rGXtp8^91u*n%1&_SFEfsi2oHd^Ol@8;%m){Tcq9+F7% znU+}@8xZXZFl+FOXDe?g^;tPCx;S3vG!z@Wz9d*vErS;;0Xj&AyAQQmS!fi-UIN)}!g)m&Ek z*-qK*pHNG|NWLQvuth}l*`LVk@VoeJ$A*|$0M31ssOuz#Y9wL;e1gb7zQEbsU?XBsJU73Rv~hcdli&A3Sn${t)Lt8 zbWodWY-Ql|Xc{l{ZuqS>P=0RDukd`{OzR7`Lf7NAUTch_3glNKy($49%?g0T<=t(2 zMR3P3tr&V)n)6-$kK;pK0Xn+fc$$>~{qkRtqcVRGME>+_V}s9S?e~v2G<q5@hzx z9V;&fAw@aU`p$3P)VPF&#~T7xCdTXZt3drmF!BHwC~!chjS3eop*g<^L5epb|FMe> zMCsrN`-UwB!)!4JiUI}@nlYFW;TA}j6+%3O3<=2J>LHd`?#*Q{3?(M+t>;Zo?`$&t z(K2j0aZQB=G^R^Yqp5^Yku&-%`}r+m9sHOYEpebtPgBu!BNI?)aX+6BiroqxfRYuo zS=|zZK1TcHzis00e_kr{zidSNIa4M3T1bR&La}L@%78O>@6z@S;4)FuW=gtajeFle zkx9TZ-sxL?Jm5mZmVnL{Mzo|TZ4>>!CYJX#G5BCwE;VTs;EzWW>tZ<-{MYAn2B?zb zZ*$1j0u6+Rp-dRTgftF1VN*dTCADzy)%WeR%p-I+r5n)cr2Q89e7_eX?YE;iC}DML z2m!b}0Ypgt4+Q%JQGXipXg~7_2YMUCozVyNVH+?rXb0zzfJ3&u63NCus?2&(tSRGBck% zNtH=Vbej;GSp5WuqMY*iwX{}5J;9X?YwMQarK6c^cPlwczba=y=>I5jLOa;d0RIQ( z>Ho_%_rGog_RzmM6fl~8hW6IKnv7EC;a_Txwj2K2ZifAY4anHk_eIX+I^X$%JOpUJ9vlQZfZ2L^*ukdPrMw!TL2Pq*T_hk z9HHy??^5e8U+R2TM$iF1+X`y*O_UXYjA{%~Y~%u0Y78kS(a=n{`ZhU%vIV|vVQR1A zq&(^OOYkyR^s*=xQ&=lZ9o6{4A}%7L+#d93cmFuz!d+Wst)|r-bB>^C@l)&u{L2@R zW1t7TV^GKwP++5t9#oWX^+lOCfSAn2{HJF?wbReIFeqHU>bJf6^Cz{0|K}SP@5Ho} z*$*EEZMiv$8xQB^+1uyk85+1wS{OfczAqUg86(oQQsXf-HQVLlt%xgUXtB_e%hs2W zFf8flket6o-GLdLqZ58RqTpo)PanM7(OfwCZPgk@OE1!SCWikN6N9)aA#1{JA00X} zJY1k_ZQZQT!gG3?LMks`Y2+eB=a2ma2>ds1VC|_XwT~V+3S)jhVbFtv@oW z(q2wJE!H_+7XTu>T?RJM!F66ve%ph{xtEJ1`23T6Sys}+iPE8|QG0v88W`zaas*Vp zL15p*;70#}Db5!+!uVS@ZQ!Md$w(xNSPO_lBIMoiU1{^lo3mE&q^pcVN! 
zxUZG;Aczp|hKx_{_3M^)f?61&jRaMWhm~#Cm4^}j^KJ2P&LXII107;w*Tv;FUYjE-`s>tSAwBlYH+oJGGX8#@=;gIRA7W$B(dkv|i4zkYt}4~roNk|F6doTbRr_5U_|e^NrY$}>dA0XTXXIM&(#Y;E+6mWO zSB~%Fv0x2C_@HS=Y+(Op{EVBIUMM#Eih zzl&PT8rX<(uvWg7(yJqEWI}e)x zb?d!nHLVqWoEQ5G_=MHR^QS+we>KSM5IHxIqD4EV^L#X5ozj;k?ta+!_R*QS-Y+lL zZoY)ytC#esc=X`M&W6DNEV?;^r_0Et!<8mVa!N_jTld|~q;}*5{G~}O<@S{ti!^}# zEjZ-(pVZ|QGO{D!Rrybij@P2=0~ZVy{AUaDWb3@|C-*q&p`0z)@yDWBY`2@sXrt?g z9Dp7=o%j#c7S{blC+v+5zer{W*Sm*46(2mjL!#%%C+4T}Jxy(%l_`0%__P;}t_Gk0 zSX#8UwIGU?W6IEhd+jlQMZw&U<}* zak#f_s!c#QS^Pi@oIEz7f_?ci_T0i4TQYS&^z4TZz{S*c)CRBym4zLAm1W-6GJ2;GSniW9sd-+*S|@`cB_Kt@SS4->-nKG2+sdIC2Zxo`3%G<;#+i zm$e7-TFkvS=681oiu5;WAks7?DDqMJ7B#}Mu^svBh3D%dY}fcluEy0{j1vBla4J0h zIoFGKflh)@WA_m$RiVcuA_B2AdiZcGM=QNHEzGji4Edr;3@QWIa(ka8*#PO!;pkBf zt?n*Ot(Mo*FnytkS=t1h1ruaT(YGHR$(n$d1^BoD^sfFU%~DCb`M<~GyGPmCA(Qm0 z5*-1`^?y%|5l3&-BO7o2`O;F|yuzsU9G<6{`#~MHFVI8u+wM^s0ybB``ayePU@3=@bN0ByK~%kjTaXnwl#AXMK(̒+j0|_;k422Y!33#b zAs4n|l-CqRfc6t|=PPx-La|V8E8~wy9;Y9DT84&-8I{4Pu^$OE_%}nnpXk&f+*wgb-NqM@^MQbzsh_gGq z`-&WTsFgN}KXvj!lo1q)fpEHXCr>{>i$5u+%02j@up)7q3bzpo+?`Hza4I*J_jY(H4BOX1duf&9{U;=)9t6RYi!9I*) z8uIK(M8};XQBhyVBYn4|6yshwy zID<+7ygMvD%3+*p9c;riQji+A+&$Mjl6!l@uO=K-rtQJB>(K7Wt)><|j$&SOL zE-5Ll;Qh|>sk*nfFHcQvDlNRlYd#CBuirFQEUvC@pLmKHDx|S|9=gA-@&MLpXl52= z-hTXwrlv)$=RFH)DNXL(Q_+{k{PDT<4r~^pfB~#Mpzgy0Yh2z8F_jPg7?@n0ru+GFX!+N8sx9yqz(+1 zdux7ZlDT9}v;!~^!2rFRhHxvYE}utOEG10Nq?bXWY1;JL37A{waWl=kX^M(cwh-vL zwzJX)L1_msHCxZP6g^4-VkpHS8_IM@Y{ zS-$HjbU}e8*LqSA`~J#^k-e<&!x#10QJ3aNM(op(Nc0SiDE9ZcaSoTUec1J!>a%Ar z>FG~lo%-ja{GY?_MZ1)hHm|B*RdZ>i$^ zaf+vnlOJ|FExG2gtsV9%ZK~zGJO)&I!9VIE(|%s}b@pOcptK3}m(WJY2!ek5Z|hn9 zAJN4Bp9fCNLH}H;la(F<;32c==?Fwx8lt3D1uU`rM!&8Wk|uJRR-6cRqh!*;!Y0@d zBH(1+q1iH?#YrdFSy;p7?PcT0aeP%QB0%C(5|vH(cm<+JWe2)>@E{W zoNnGkHF(cy68TtFb{>%Qzy1wHfYAZEu%x!|LJCPHSsJB`%W_edR#`v@r5_ z;I;Y-1gk|&4sm~H@#3)bo9@Xpw1_|zxdLt9;|N{mt#iH@??6;$=qhd0-WS-g+0=f3@2YW$E3k)6}C-ZBA&?4g+xa+PFjn2JD__goy;wnM2hN+g9 
zFN>Ci9o&}1=~B)Y2H8(Wq+fv!vMS^6!qFq++=%6!9gE-^lqja;qE!iaLoI-#vwmE8S*=$hq3>0;vYRH`ufU2 z%b6-yVbRH2#@>8i#m|Ik!H>;r?;Ut5?n!@TX}{NEzB_1Y-&Mb1P>&J-#S=ZHX@`4c zVs0*3B0C$``s;|al)walmQ!`=)vJELzWuZrXgQYFYW|m!gpi0uBc|Hu8aMrW-lxs8 zUGUg+K>S#2YQmKh0wyCE__PpL-1p75DxmTl681&KaP5&BDhdc+zQSKk{e}a`gJ69v ztgU%?tgTBW1M3;$rjN)O&{o};mQ>6TRzb#$6cighuPr_Pruj_OzTa5S5H+isR%3*} zjE~2KoqDo5W=kh|(?zAq_eFHNXjGKCdQ{Z$<3lJ>A-(Z?-&^tY!`%HW-OmYmt~xj| z_CR1!%04Tk+R_y{pcARb46>?hrD?Sf_R5i%r{?l?t2pU?Dop_9&Raa``XO>KgS8Bn zzt#FJB~L9Qm=MUq$Wgu$8-uu}AUjWg9}HrRCO3NQvhG>yjYarmsam6>v{TQ%wDX{@ zhgvM>wZL_U0VG{pq=R)6$5a}yEkBmd4}!f1hw?0=;a9AMWI^_DUP{^4Z{%C-tu@rzY5sc4kw@$Q|))^z`UutK&^3=CO{d>A;wy$s&&89 zsRI))3t`+{{&sZhH~7xNAs^`b=yDj%i>UZE>IRa~WN#vf(kxFjc;pdd@jypFHkuN9 zKC%JkuDU@z9<{o9F2&{mwB_#&8sk#P8yloe+M)X3MLSd%gn4tah^j;~wD$*`!2HhH zSzG6629f9|xcjf{Zy)M%S87r%m(cfjwYod+Y7Jo&wnDv$ z^MimxMxi`A?8H|D-u!!o{s$8i$k&!kiv0m-A~f^z)w}wf{IvuYFw#&cJcc3$Q5VWV z4VtP830W9F55Mi1&XE<>0W6RP;L##i3#CDvjC!mSa{dApFloSfr5mdWSura4n@bA| zq>^JC;GWvMso5i7-|iFSZhdWQTN<6zSFzrsU@&A-kt?;NVnI8JEanzOeZ;#^_Dvg= zFMGOOpxKGAR~$Qkh@iN$xdiI}PhyNd@H?_sluh@Jpc*XzI%&a|FCo0v!`@}7E6vy5 zcWb!x#}6Qhmqr@ub`jZ!t@cVmHA_VR%m~@D+tgb7sqdnPDrJ?H^KVK!`(wjtvN!&;qONo>H?UF&#h@J|%dB9RwLIYzJ(uD&d9wh{JX&B*4eW8&b8r{v zXcQ9jr$&l`6iE`jQh*Cb1K|PsAS9E-TF){tqDPzFe%rVH179~{|g?Z0Iru_V# zprM^zW#vopPf@BH4}2mqX*2~WUeaAZ5tCs-OiQAS zg^vJtY*sT`Q7nvgvy0u0Ly>}INAF2X;es!5s;t?N&Z|}e&FeqEh^xNC6MUA((J6*| z#o3N0iJy(+9MC!yC~Z^&VLRoC`g(gHgV!n>RW6^ zSUtXIf>?T)Bpr1dv#57>-BeGiUGulI@*ixtgE|lSiG%f=`7Kr5y{nITh*u>T_FPmn zM5lvf3UBN8=`M*bzpbY;8xw_n6^wZWC|!`2!};K)mQ$*q-79)l25SK&13e{0ml+!3 zkj+K`XWfTvTHY)wQ|9L8=jZ02Gp|#cTl&YR`t(D#CY<~0`E(0o1FVmcz_y}19yRx51k<+MScy5*`NrJ4JYn8FmC?Fc z5XB+8-X`dtXY(ac>{Ju^%%)~?Z4$fgNXR)4$GmS-CBGH|wx=lBmzHv~@VfG!&hTEeL=%G>pnV}2_lUQ>$^&A^fx84aJ68w?>GswBr1(ua z&1^(Bka>30ctGg;0ZmPt=6)}QjS4FhZZ~?|9kQWo*f2Eus`jc`UG(N z#3Yj_{)A4EPfWtm{^$3@xtP1w~^#!WR?T`OJCvzLgv!M(F{OIwCG zqBHX1c{l&t!tl;WM91-gW;C;V!p^~L_p~9bRoR3ZTwPmdF78ht+C;t;@9 zF7~pE`EmH@R-W8YK3Lu!VjqCfnH9tStasV*_mK| 
zS)s_cnJQ|}QRVxq{3M&U)R|ThUj04$=84q-*Qv6 z7CXCSv=h$?rpv+{zN36;f9PaoW&8HQk0Rx^X4Wv9Xi|iKCrYN=}pC1>2JG=}r^}pgO6)oy+1 zm({OpIL?HzO}&%09{U;zo>tcckk9VeHG3(s%e}BkjO;S4^m5|4z0?>v6=@wE*2q3u z)U3de?z(-;1R4-hXMh3ryqc%d;3Yb`i_<)N^V{}GZY+FcZ8)FsUfPqhWg_M41~sm- zq+KTltc8E9fLph`Yh~S1XY?(@`CrxZFV#DqM#8{zlgn{!s+c zYN#tClQrF9tZ-3I42`S*_=G=ar-8DQ>V~2CyNtc}^SO%<*Y{h!@p7ZxK)ii4msNLT zT2S1zlDijDiqkL-^jgY(@-QzV18h8-I2#Zx53_fkh#yqQ zc1r$gjt(j8&CkhlQfpQS@DJEreAua@Y$GwP1BxwCClmKtBO>B4W zo-*bs}mY#!+G<4!p%rU7u?l~ocEyh+OqHe4x14oOr8gca( zD%~=L0;Z*l+b*4FtUDJ*y9{zGJEdt_hTZ4zYa0vVACt1>z9KeWhb|agW#lrIc)-Oo z3ngMy?&faQQfv?N;L9sPmo6tKF&7R?R0-t=eNmDAIC4KGf!s5bjpxe}zsE9a`h@|u zbP3_OAADeUh|;#!A9~%34ip~7a!yfgr%u$K3GWfkA<$Q$I~cG?+5}&oYFNF~@9Dhw zxWhfHYN(rEtNb6inXj>3kb+NPN<&g^X$M5y#^AGKP8(T2J6p(rj=Dm!%{{LhZuK^O zL`dS7g_G?xap`eiX__8MBJJT6R`X?Fr#vkFM?`)CANMzyp{AZ=he$lr9JTSU*@mlEv;F+oi@(mz+$g zh}_U+{oqJJZr&BlXy4j0KozGLgr{)3Ppp-^NxB?0?YdAfSqT!A{kvh`9Nuaoh04%{ zmu)En5VTP*C~zR0f8MkzA<=C-Qjjkza$Nh_OCmlZTpkgA%h)U#cBp-nH+TjB4nUMb z77DuynmTxPwhcb*B&{X+Dk4(m3((2tBNz?kx?sq^nP?25?2<{ao6h}+DK-QGAj|z_ zv7^1c2t;o$OS+7yM414A{fYhke>__4D}`@-Lo4gkPG8 z2={F29^BeaLUu5h*3|b3m6RMjY-dl;amj+IrZl<)J>fgGMUa`e6ucUJAK$NMIXScN zHx^O8VQK1zohq+%n*{k8;*oyospP9fR_n)Rqg;W%Cjz(vwFf>g*g6w^z`R4Yns)MP z-wBRX(;E&pHp#Yd1Y*72zS{ADAW>f{O7S$#>~jp z*7ig`zNo3=Vgu)XG;ck;my6M~H+19PfB3Uaw>F>{CX<#F;f;G;l7m}89@^VGYzM|Hc zuYVb`3|Z?OvPA7T)S44~&)5qbh?(9@HX#pH-o+*#5N33+4rgvrqB=ZWd zU2B`~|JpoQcsb$d@1X7Jk0gP=fD=1eR;hir92RsaOaEryS2DZibK2rpFrS=8~y z$k~H{-(F)DWpZQPNJ^C{V>ORDr{KT!h{y30Hi;qoj(l1IYr!|>AuT;@kmAU;l|rY`JtwKs)BI0lhqO+Bs#g{9UT|D>{h_G7 z3iKg{_UNpo0eWMdrCi_L*oEu)ao*rbRzVy>1drntm;h8gE&k3TH(>f1%uIA+nbs;> zv0UxZI`A_{f68{6TzUuvT7HMSu__&<)ibt6KuC;)#~S8TFXzT_{o`H4N9@5ZW2Nvr4Jg2#S;Q85}H(S&z5;jM`$mv8omJlz%$>ES++2rOyc_Y`jF{qlHrt7H(kEyw&aBiK| zj_^}={hVLBE*z+%GK(X1qXO5a;3wVO)Vweu8a;!z^tT92Qy~D_h=lu?)SUG~3@Q>_*DY|0GL8-SNj2}qxnH2QDJ>VHLN%;2ti|VIzffP-nj- z7NFG#(qfMKat}hFh+r?D-<18f_wwZAP*q4s`RKbFO;478M1*(F_w#Yf>Nho7Ma2i*5jQrLMwwCiM03XWdoPK|prp^nAm|%O 
zo9;7h8aY3Oi-y?58s5Thqj`;PBE=cr;F^2OEY(;_s@rt*iHpCM#>?}FL2+^N`(8CD z<{eP6+QtKySZ?l(&C@Y#z`32VZxA> z0doz*ZrP&zSm>Jm-4L&;xo!Nf-K4?DqKoXVPMzG9rf*~#-aA?&!fnrr(j}&OyX53o zzu6qGe$k!A&ZX#6qLXgKJ7+vrIMX_~^)Qi8UZ^buq;ms={60AO*Pdg1!Rvco?7EV`to{s?V87ctaz42) zh3KqTw!Ls5Bh_VXP`Rhv_cLarL5Cf7iBr)B+FBimRP)$8onBRSlX59wbfDG{;d6i4 zQj{*13}a)ws58kOkDINPJh)8Ym}pa^ZzmPw1?zEmeo2%If0GE%zv48H?<%t>LNSD3 zHSy=|CSwzcA7}gMva|EFVq>s!a1$lzyGx7+!SYvD-rRg-{$2c`us&lWVJR^KvpQ+T zkR_9e>fUJElWWRhne{8G>mU63-ibZ13=~G^%23~9JTG9g8sw$>sryA z%{sl}*T|k;8+}^kYD3Y|eFgF`{bmMOg=+G#L71)}B|ENDyp~B&@y`45vtCF8RXwLf zaZw3z8B-#Eg#lp&x#B}uyEwuzTjXm} zd*wn3*)^MqQZz4;y&(^?1%B4xZ7t;ri{u^e&ip6LiVxbG8u9!^I@W1GdL-*`hs{}( z=7-_xC0JIO#I%0MPe2&KB~o*v7jUc!XzEMZmhatau7*`)QRxN?_lwniFDCGua74Z~ z4SSN>5bB(q@YdL_YNJg#<6F9Kk&6qshQ7=_?HK`hK%tI0c`(v3+>c=Z?c+mEXVL8i zwT$d4_tsvxvY1!#`e#=cT9UP-kiEm&&Ia%5e3v^hmNQo?rB;`Ps2RG9hQJjn5tvJ{ zq?Q#)>@24b+3GBh#rRk6%oP=mf@#sJEdB87S6_CvpF7++4B1KP-huPaQyrMaw|>d~ zoVLW|AjMpztch+rr*!3jeHrXscA?OzU$b2*`yzoK3JK_9E_Uk5+cL(Lc^e0dmHZyL zUG@QTK2sam77~3Jn@R4BF4COLhMHz7XhU^!yrwkMmYj#3rzC37P(uN`U>6`&Qk67= zYItVlrlV5k*RGYwth?GAmw_d!N?X*RTgO?pd0dSGy=(=oTq~!0`EZB=7mtefsFmqN z($4DBv%PLO0kz&)ceNHNZ4P6Hmbz%*b-#6q<;yZjTUS=jZrac`-IFWX-I@Zn)+f9Z z1+oqz)esI^{coc9D*SIm5{z*P&Py1lmR>Xt1pD(}2Py7&Ls(>FWtcnp#EBCpPoC%x zmO*ruHJEpoyBe6CcSqZxCRof(w`Vn){R2W)-%L!5klMS!?2h#QyBHxCUfwg@;;&`M z9JNW{V zeh>#<9E=R$Stf@Q1||;@jM0BI)hD!!$1yvFORFg=*Z*z8X&U_dQ@~2l@9P$)B}{Rp zfk(0fZ8FDK#DVp#!wCZ9U`ok&e}~T3%eJb~c2#*4Qi-|h(BRwx3D>=|1Bwc2wy&wy zFS==<#hMlwtdlRh0wT7&U3!+a8*X;bk#xaE@Y!H+RiR?SLD0F})1$(j=KRu0^Ep~J zJ2k~~BVMQ7|EkFoZ&`ggR|bm(#W>iVyo1u5(V44z^9!h6@0>agp6>yO^mIXsZIGP?F>^0>$L zB}eJ^TRRJGnC+c~WfNx>DnkQ&+TvS}r2~#SnZi~!6y}h+b_1h`^0E9{cl~qfOJ)6qB2t5PE%3QL z-#m@@MDMtzmqs7NV-!Z|aUZV-p38R7h%=BKB_+YDt@Dm@s?I=$rDnq?A)>_Z@l}DROz3_3SqHl@cJjhNag6h~jjK(QvlsEY^ zP6KKhajmWDiM)JHlZrk7tTa|bd!T-G+B`^~Mzgm7AS>qFqv)ew_c0)}dv>15`~nd| zvMN=r>m_9UXFBHQ27~3ci1+&ErE*130A^2ORtJ>`A6BaZDv+n2Q;WGIU<BIZcupoq%3kR!~E5;V|Nh8PVg;vfODh>j>)d~P75 
zSXI08Qn47+?p4i5>lg8+@42MP;_!B#MR2lWK%kMu{W%-Y_bXR?w|;-WUvCgpd^Tc| z1mHu)5);7Fn*EP}tD(s7qhE(C*xCG-(9VYvlX~vCfJlHW0LN2fnWH-ak+LG4MkN65l5g;wxqL#X%Ip$3#|b%^ z?#lqqq)P*{^kA(@Yp8x4Mj;HyKbjEq8!!V~-uvtTuKbS{Ek$#4@_HZD)m|QJ4WL*; zq_O+=9USiejd#asTW=pKF1WL_;sdjW-*=*&!W00XXL|n^*vO~R?QfcuhX#t))(lY) z>MM@>J&Qg|`3k6XO~nMlEQ*mc2;?e@sX-72nC2C{PlIpXZrK#r60Eg;*Bv2DYG{nr zJdiWBYC`(*&{4d;4`PpAQB}cGc^kwPMgR5RKeE*Zww;*KA+<9ya3- zAvg`Nt53zB(P2`xL1@Ot%cFv@PbOd!@oXQJs7LuKq5}kg658qk0b++kWQ%T!v>{D$IPUWpd(3oZ_aQude4tqFyRp*j zJo}cI!cnj_K3X=fxIzdrUz$n1a$H@_5S6>K@*d@Un@_Wi& z#9%zZp!{zs=w0yoUr}U(cC-JsH?n~Y7GURfhwK7g!ybWaZ^LqPWp5~i(arDo{gAy+ z&-P1#IPg(pLSWW&$$g6C8JWFxEg3fv(aCxcbb>;F+Mv1s_&}())qYWL>c{`ZN2bmK z-M>)d)LHn~W}wc(zu64`>kL*}(O{_l5S(E!WQPiY6cn_r9xe|d3K`YZ!eR?u)lC3rhKq}n38LZ$*%E)7%9}{Pdi! zt+C6jji)}`KWxE_o67m6qc?(KbJ4Z&`&Zis5!20Gf*8{+2RcDzk{a$dP#~B3-8uRF z>piK>^tR@(_7V9A@@D{Oe3!;QqLbAY>KQ9wJekdF+n-kqQL~k%Xu6P2sRdx2X9uJf zU!=UHey69CW^B=ASOYZ3o2Q|$!RVB?|2zj1G^)8rz}OeUn|Ya5iddzj)iy~}zxir^ zj?6R1on%x=a&GP})IVn=&ctWZgNB!> zX)`ddoe)a}{5=!vee}5MX^Hu}2IEY$>v`=z;6f9=tF*^sz-8MN_Q|O1>*V~}?|9i`j?2(;rK3yBT1@sC77 z&-to|^|-2S)M+MdKmcmhB{Q{i-Vd$3k86t}=Q)WXb>3eJGd#skA@ynBg zC=45#bHCX~k0!JULhk~GHkFsRJsW~Z+L{e1f!e_+k@g=^T9_#*ra9NC(Uflxh#lc zZimjpt4IpPyl#hNXb427dFQAqkRqY@|70DcS1z`IsvYXON}AlGY&Px%h3u7>lgS&3 zWYpfy7cR;|BV|gFGPy^J-MdtmL!Duz*p~3-h4g_r1`I z)H;bEYD|?D8R-9SPvXeRrReP(SifjMyGG~sT=pKhQE>-Y>3g94nf#?XVLG~XTHRFH zLIXLkLIc4%M(RhEJQ6@JA9D*P>P;p7lQ;bz(XQ!V)tSKZ|C_orI0J7G)}gyKw7NQ8 z*WSLc5X*@~weScE(sLFUpE$8NA_eeG>u=IeoQxk=ovX;n5ycppv|T%6*!Jles{Zls z?;k&Qr5!wITNS4VZTFqk)rNrfc3exEgpwD_Q6N4;J84-F#pHK^d*s&EGSgDOn>R_s zlP5PefUi0vb7kxE@n_F$tHRF$N*@wg+|g-XCoRL=*NRIYJX{HyFL238UlSR*<(8Dz z;O*iq-L)5P*)#=MN{#ncn}!A7=^XN#?U)Bd)_KS13Kc}l)q|d*Jm$Wk-fK7t4c!Y^&aiH`{nrr7!iE$heMyY?ms*LG+3} zUG#J6VW}f$EK&vi(wROuXuZ|Uy!V64B9PD&0U)XFpZ}z4F3-BHrE8`t=tU+l*oxOw zX4h{%C#S~k&Jh#xRB8PPG$YWIc6160$-9b8Vja=Cd5-A(_thJRhVTQ{rTsZMpunj~ zq{<|BSJGx9K93+}at2vOXXe#(<{PeDq3d1#49o26lo-9v-QD%|-9Ta-i}h5kky*W; 
zhkGeu?#_VZHT$OHp#qoB+%U$Q$oZ`zt27bCmb(hUb&Y|5DAJy2eez}5Cnt;S+_YA86<(7jYwer`u{g3`+vHr%}Nh z(}{#zQ`31oxgOs?I-$;8IjtLOyD2#x?jar^lBQ@`1pa&1WoU?Dv2*9Loy>S_tD-z@ zyOYC7SL{Oz#m?NQQ^|L?u?Y;cd4H{+1y8_^QYVmBBi>;|b=Z9pW5wW=Rvf|N62&r& z3JD_bphC{Qi2{wi?zTzT^YV4Si$JA48)2ZHYK<1;k8k=P% z#4D2#h~#91NF7xHrqb+?)c&EWpn%}JO=p!m#OC3QM~hUJRR2&5^*?y9&f81iy{6kG z3hYRzh3$psgO1*DoB69o-bG+BrD?Uh1U;7FS5%1N<~TMO(ztSp2Bk5_Zw%HZB1CnNFYi zx~jV>xUeCu72Tx^h1cKG3KN3&GB#;;Y1scw)lt7f=TGCV>Gea>p}8vdP(k~CKQp=> z64l7b$fB|m=YD*_kJ1A%GoW#nW|RVV#shjg#r%FP%8f+CV9NHpo$af$j8~sNF3 z2toP`9MTo&$sbl)ShKM0v}(P0(MB6Gc#s~_KeGa~-nglS2!=d2SA>d*q0PG>)BBkF zwn6L(3D@=lAM#^XRwir8L6f$sVb1Y}x_VFvs54c@)rKopnoEAGP4F=NsS}H1S1>&r6>A z>sKEuYs_@!AO~!&ZpskFO2RTTgGeu4;3v>;43)_%{p_)UA=A<%IWLmuN<5>1!Qh~Q z0saIq1}7Tie9B0#Hyg9i&z^NFDqdlkZJnF@{dK|9)W7Ed$BQXB2$Hyv&fx~9L`9l0$ zh^6tJZjw8JX-QA4C;y&GCF*{^Ton9X#JT_p;VkEpSC`1S^f7Rd9cWbT} zIq6O<`vs3~+Jx!2RHmr}=eee@ql&Rvow$a4H@~S^Z9D6-QcHq!FJ+6((0F&SoKQ>8 zDe3VR9EoGJo&R`#Pcog4@_oYjUH+|$FFGS4@W0LGB|)_aDaFZ=U$2HI0U#m>bUE8;O-=gE!Y=3S>|E^L8R6VMb#>vUHLS;3`titZ zF&74xE~1i#no5&EjNYzf?xQl@MfO+}(7f;KS4#8z9$=F0L?7+KdYP5QVU9er;{qZ# zdUpOvS%uqfG@*WxEU3Eh}Rx?l$rn!mY3>t<-bbeytI0FJ*i8}#p*Fl@m$D_vMMYHuFb#JpM=)g z`1y>Um(LX4p-cX$X0ZMM={>K9H28_L^5?~1J?xw+n%;LsFt{~%;g#j>Wn1#E`KF{{ z9L+>8nCf20MOkr`zlp&GH9npms}VG8OW}qK&i!l!rh#7>CfKM#AV!lA(J?H}ek5NJ zm1US6(w25%@StZ@y;7h-w7l3oscY8d*Y|e#iC3(7l+stlpH7s|XA8r$!#~_0q(tG4 zX>9O37<;KKmFtNWR8uUH-4s^+EM$1yfU)GM!_8e{;wHP*#7aDTQ(^Sr#?X!RR^UuF zcz;(4EH2q(del7+7Zfbyv+I6i5cENyP7O{1=QMxwSs6PJi=LmcPw#-f!D1I*@lkBJ z-o&Mn*2pxwVs~W3Ex5cgfxOeo2a0g>r8~Rg&*!po%bsSqMGye2J{8`htGQ_Dp~8B@ z=xajP!gNuD3q7jn`oYEaL6zT2A&9ZzoV8cEy|ermtnp5e&H1j@YSp&oXN~>v6=Of2v+i)o z05up2adJ}m>-Qm0N`^et=qml{+=>Bw#ieXYUpk@JZ`^nN=DNqk)erHcfh!{V)ox;E z{xZDYFo8K{^=Hha_NqUIuoB;Fiwg7Aif}qV`ob=eSfYq`w;U>zTQc}HAy`CMYcf99 zFg`Nh{_w9fz`&t3ZRnY@I&*QpND85iOZx6?s2}0mwWS+OI9l$^h=oV8!5u&Lw;)%Y zZ5@%Ateb;aEB(&#?Y1;vOVHg`zOHxo*v!LvA>WGkV~5)3pXHe*+nj{Sw93&>X&`8# zbGhRhc4GRjhO}wK`|2yp?)rm!6$HQh!?lK$FKpw4ry`?{u3S0hekiB%gsZ&`kJQGj 
zW3&KF?kl@tDr0wY(B@eFX*0XG!bO`C<@gR!&Qsw55xyI~7fMnLRG6|Z!dpIu2ic7F zur~Otf5G0XL1AQNN3Lk=i8}D#K zodz*uZEG#P5y3HRZz6N*yWEL8KF!ZKwfJ0gYd<$msa-+Nzs@9SE}cdDPeF700@(J|0^=yOZ- z0D4Z^d{6dW348+kOxwk#(I#m-+fS>?Zhj5AOz(wq{AElm^G&dr!Lsq-pGy;7Q=rpP zWBLk(9JbNq-`^Bm@GD(PvciM#a`8f$;&GC}LXC4&vux(Mv_o*CAJgIEnxid^o~ovS zt4-*euP21_Or9Z67wAu>!PS&R2MdQv;^Rw85u9raW9!DxUdZGo6&G&RxNRrAy`sn+ zjjE;?0nccJ=E3Pk=D&Jv99a_3Cy2||oJ$IFtL86ZeR+-<$;rdR%gZBnLS6K7X?5*H zMj6Ya&92en*<+=o0C zz1Kd!M3IH4(#&D|x#o=z#~j)-PVQE~i1+G8NogY zy|{R5v}-R+#TJUIx_ueJqU6T`REw&=)=2YU#HGd2O(Gv@ltW6GQ#ZMpueoWK> z8PW)&$=C5yMd}soeP5iWD5Q+cwfLO2vB}5xX+mv2pi^N}5eOWOT2QPMU3`7--4pQV zl!*1!`gSt^OkG`FRaM<^Y;eq+h`2C2CUcoZAMvHW$a2<@>D9T%c$}H6L{)PQK6$tf ze=Zho^$K-`=v7;?QKLjoUu|qjum*}eX)_UTE3*QVv#iAzGvE~PYB#1De+X}AHU_gc z?>G}p&9~V`hbcfFV_YDhI`km)T5H&bEpJpreEKw%;pf2wgKg5?$?4$LGYL9T-%<|l zRa^M7g8WTXoKRmap;l3Qdq1f7yxkg~%)xO!g@H z!HC2))Ay5@o~$olG~#?iLbIx~q_uVoEmqW1OU(4yP$Z}_g63;3e>u9a!5LZ;SManH zHkxF;J8NPc!eP`ji#(Mh%4H&4J!^EK%+8KD%Up!5(UrR@bmAVav%y#p)Qq%i=}zrP z7H2&anUYc>Wadm7J3pU()EVt;j*HxkkUJzbDRkl^;@J@cf4-Tsq`Na0PAo-y&?kPl z=@}bh+vrEIIa@d->{z&7{CIwAf51qyQFknS(0_6~y20=K_TVw2`m$ED%e=sRo}j_xeW&6ITQ=el@zkx07J1l<7Sm(jNSznTzW<5c z>V_LTbDAV~M5$~VXST&vC z`o{B_X)M?xJXo0BV%&yzMpP{l$Wi#Edxl`p&q6f8VXE1BmN_>Y!xiCH{`b1lb*c); zl3GY%W!9eL2y<(~+Cti!??NY-FI1b|BTLg@11IYh70`w%pYyT&T?;*6pKaEw;&=9v z9?QA1C)V4)>$X9>urV`wtQ9G{rM5B_936kD%}7sG-12u%8Kd`a{;Vfi5;x0{=j==j zBjRLpw(q611g^FUpTNWv%nIr>tPVu7EYzD3NxYuB%?DEDu+d{Ml4q$7)BcZYXH|)eYq&INU%8#lUBx z<7>Y@CJGszS^)3gX}BCP@n@XexWL=}jX2F7|CwTfJauS!>pfYz__0;p;i&M>@+=L} z{6Z*<5m+`u;0K7X2JtxCevw>&VAMh~VsfvVZ!Nn9CM(j%Q~tiUobi7V-}49Q;w z?74t2*DHvO4&4Z6x8~;H;Ns$l#1PY!E8|Q+Ps{o+9b@Ay8J$>;(mlnVf2zj}o8Q!T z%WrYC(cJZ6MLi(TD=I3Uee`hT5-rAZ6?HP9_L=TeaBeX?6ISM9b2z9)YV{5C*gZ<|0{US>}Q631NI*DY@JTQEwCET>)BI=ooi-<_B zuC7kH5>Fd@B6irl2RSus3?`ZNQ!dWI2g9S86|o^w9FSLlHGD5@wQkI!b;r08N}}=^6i+753-O7Lgc9-g{oxlC63- zc_W;@fBVLy^g1<0sNKK8^Mg=garA`K&@_bc=5*yrW&Q5Ik`gLRD>&*(`f)D+)Km~or`@k_!PyJ|b 
zfPmIMep^k=3j^j>O@87C+cZwn-Nv)A_sN_(u_4$~hvsSz7ql34b6^|FG9TN{Ywl zrJv>=X$p7O#$XdS-R$;;6-3H!MsS7v4aEiT;~Ob`>Mi;-ty2T!66mdku(*5I@(P?V z`;6)j_em_LD0)(`6rAl<#t&%Z$>!1tR2OW7?DgIe$7}S83kr?janNiZqJ84A#wE z3$z-j6d0;RZpzvH$I|&(!_N08e{jUaIF(D@Z+8PD@0Tu19xxZwsi3ghj>f*W74^XT zQ_$!X8Fu!zMKFgh(BQR?wyLbp`72dKC2(B)cE<1b-#L8kk?C;Z*TmA)Qr-*Il?XMd zq?R)YdH562xCH#z5^E&VSgiHCqKG|Yc>n=Wyx97D!0x8anHQoOs5dcB?-oCH7TzT% zxc@Ph{kFQKaNa{Q4TXsvUO?=P1%Fa+G7nWR1k$)rRf?Y+04#UkKedd!7M?rD@Zb>D zx6WM9vkNFYC?s9qkk5OejXrI9Wz7P&R(}4R=@VKBt&NIlf9FgrUoVtojk96rc(g3L zWn>F^0ePGz*BlI|T7`E*@WPeDTps<-0Y7>c3tj=&Ye4{d?au+ru!%K*zCce^yun#V zth@f8o1LAjtDWkGlUI}$p0xypRZ`1gl3|bU*=rxEM|^gqa`@R1;8)0cO}5vQ(yto$ z!rM-=d{lFmT5VBVXTQXzv{)=) zl_rAxO7U?)Q$onkF9KMV-qBh7`jSh7o*@H{A05!4xkZ?M%fp)c{jy$jyD6w~zO^#sI*iIM8+ z69X8L|9C|Yo3k?7j*g+z%p1hL2boJc6R?~1`Cq!bn}YFrw!l?trEh@y)N&AQZw?-k z2zYr^_m5*r)a{oA8_ykrwpRz2pV+@E6tFsLe99m$Kr?0L(F$NY@TBDC+`pfbQ(zE{ zw2IH-HkcRcKRyfjY!Soyh-#+=|A~K@hH=|72<1eUsnENh>bG9QWq8TL7I(>^eYN$x z&(`h3{Ed*gXtNV9vk*!9Q&Q@E-Q0_p-J)-w z4Nyi9t&ojm^`e{ua)c`5$VOQ@-s90Q@$Dr*VV3NCeiK>cT)%Ty|1W<$4)^StAbNQC z6WqA56LT_Z<$*i^O8zyGmr7Gx)}JrDMAZ6`pfQq?ss5nXLXutGq5vgsk~oAP>(Mf# zVb|&DnAiQ^Msk)lo7LT7UaJ7_-reGJ9p<9lx8UA4t49JbS4X)VKu(EXXYP83 zJo9{F0=_O+|A)=ik}!*8z4jFPsb~BL6N-(xgi)epg!FmkDAl)S*@cJN0_o8A0(@6? 
zJfN~tI!8=UaD*c%a;l5V%jQ$%8c(p|@hF5@Q3>h9%QtE{(pgCb91q%C9xU4^F7PH3 zx7&4!Ws03%?N~3&aggqwDSlgXT|>PEqoF=b7VZg05G~R;N*-rr7(Ii6A~5t2ZuWql z%=(!IU)6Ze;}(HDJ?0lBI?-^|o~IAYUuGHC^qnXY)U~%XrDUN2_$xR!o*BvsI z@%({*y1U=I=Uz-sEPY@lnDkiqqI}v*_N!3>W5a<=602}VR*aybY4cl&QOTW#j)h{3 z!TWP+Y(isoc|{2_k4!=>s&=D1vs39N{^x?FR=hg?)!ITmucL3j^8!*Ml#TCDxqvQq z%rmMvaF`i}_(nR|u9l$@ROE@x>z?O9Pp+)NjhQ7LqX!*t#fTNvsgK7*DS_bqrGT$7 zYituGQI!6?*y6_xS1t{AH^B#M_yNA-!tJgTPdqhf=#crPG0?t}^yCJvK7O8vm?MEG zP-+M3M+jcW`wo#l@b4Ax`XkQp{1b$>axOm%;oB^5_@q4s%(+aa*140qf|(?uxZFLSQ!tRjU+Z zH4Bjb#uD*L-ss@SR_@j4TlOQ?c99i$qizPKzFjUt?^6EW+h zpT%&`3;zT7N%pIF-(4kGqt!Bzyiop#~@7GXJu<8!f(W#Z&>tGl=&X z$f3zd_WN^3tl7=gxK0EMT8WIjSPTvox)|vCa0$YHxioBL{1RK1F|Sj3l8HE*ia1Kt z_?Z_CrIMZ52M<*iIMhujA|q3f&;j%2V z8dV)>XYTeW_n6@1w@(q|sLT-^ks%`%XM=5Qf`TAgZnK4XT-iD(y-#t4Q%a~_BO|(~ zb(zrh6}WwMcAqL2q|f)Jd!(l&O&%x+r^&auqt`)hFn4)Q>%FT;=;T<`$w1Nb!Zu5y z9ya3V|IAF2NH#=usK1?E!~>PTZ|~-xdX|fcp4j75)IfZpqglm&{<46i9-mV5t&$tz zRuGeIgU|?KXk{HEM{0M%dYiedsxx^%>=%BxQ&q1h6qA_N?{63D<>c+{@9!FD@c-tiRZgzFzgICJQ_-_u4tM3<=z1Z4lm=J1Tb{C9X`JUiTiv-YlDObh znpk>AG*9kT8wZdX(4BHWla}Zj8~*{Pbg1~Tq`Yoe4KBbOIPetMmcnS{8(%m zx@*FwUXO5uZu^YVm%}s&OlST9W8uK9f9jeqy~4a6EBVdJww1p`41l=wJ1U+B&%ne0nF zch75wTh2>nsL{k&y8kskNj3*}xTYgE@h~R$B?2V9e70ucN zzTcfVa^KcDd7?`^`kC!#c>v)MS1UDT*dn&u>h$a_f5)$;SJoCFR%b{5`YcVyv*@6?xNDl%d%y8ow^*bw*2rN6La5e*em za7mEot(vLMzCX1xpi+x8lZgUECin*i(}wD(PV*E+0ZdEpDcC_(ri`u*Z_qOyU{{5X zUyAfnJlZe+9ktGHMT%%;U|TP%0#>GPjnFPk>k|h5Ro6Q|U+Gd_j;Ov%tK;yrw|8^1 z&z@hS)#$0YiPQsk8yulRHqrFW5{D6Z?@uSFK6yw~-5#^|fE9e<<0A;5(%Z{!Eh^@=g9BXEU5V?dG*kfg{22XfZ==;% zTcD7TvNanzT=0Nap4oki{w0-%kY$;gYICO*it5Gky@IL^7)Kb1Ru91V?GU3{k@o99 z<@L}o{C7+_+Hn6jUTCD%9b6JyRM^4{-?_ihaEwGArRAq${xG5p|jEs$1EU?0S@TYDl-2o)9&vA&$3WzclYiN zAqcRVb;#HQwTH0FJW2|eA8!0AGS}3sc9$CK#RKOYTwe4axi>HjXI1Id__X)%KP>n5 zuQvy{wYs|M*kmk3ys46ZyWvoSx4aQ{X2Q(e=Ng4(;Hgh(DA>atz(qp z17gl)&>a*m!jE6ND~K# z6062wN@v+(yt&kNoiymYPQolF;SUg^*_t*dXZk#vILOVeg;%a4HmPvO>HJ)qltF82 zv%sCzp+46Hj4Kn6kxCHZQZ!x*0Y&&Q*aYPo4yx@3X-YLjOM@OUTNBV>)K%1?BW@5) 
z?NoVXO#Ch-KHr^8sjT#9`W9kowL4!vG2ys5nW!AweGjmEyx$;EcfSczcS*X3rIebU z>8DAcsJa;;7KnEKOq1onx44gv71C>x6xY%}{mR{t%Iai=(gvS+Dpw;9Rb(W+qjDc~ z{W>~f3@m}|T}y-My?w+tN0%oe9ircwTWOksiS7Gox~24UAai9*A}FJr6nJq)`0}LM zmUQB5dU}XOg|~>}@lTlXJs7&6NR$Rs3abX7te%=u-j%I?>Sx>#*esGYwcJT-s+=~W zqXUM9M6*cd^CP#jTNId$+bB-^xXme1X!gKZSzE)B7%H}=vdHZgHzYz^1bSnORIiwe znB5_8k^Mm?o4o|OaXMc($jn^D!K}xmsoPydBKWc3znX_lVqc0@OXjLB3I-~)d+P7I zD6;9l-}?Uf^Z5AZ>n_%Q0=NCAyI=CBgDD|&|v5>T6~3bX)3gp@I&7P zG>-AeJv>Xw1q@gnip7a`6bn=cv1N6RxzN@6WAV*%pXn>u1EZv)6@yT8x;r~?F$!;5 zrDX5Bhuql!@pQo6Yte$Au)~pRc>*7ksypHZfVOk^G4pJ4Z+j1r$K{fdGrD$(?Fo4P z+CB9n)k#Sn1b568ji3*nj#7b2vw>A`-EvIzO}Y$i0YjJbu+RxM(K1(0q$H>=-kRy{ z&CT#F_Dc^eIBc{xTGb@$FMG=H@u_>w7eJPrHrcQYO~rGnMxR?&IkO6^<=JdH`}`kG zcJ?1@58Rx1_b$o$qF>2~*mw#e?qRAJ?)NP7IY^`xF>UvhQ$7F^gMz7S(HBYHQ-0Rt z)`3o4=}A_zc1S>yNFp@KlX4MHUN8KDtl$Fa(Ny}}9DK1>XVIjQe^p4}`_S-Uk+t*o z202Ru-gxIcG&qtpsvlU`?F(fmeGJ{^?T}FoHi9lP#<{+K#;g3mBE7tdVx{kATq!U~ zH2*?t|LA3CbjFGB9(A?2QCht~P?oF+4aF5rryfj@9-9M7w8M@JC0ZKB2d<;IxVA7c zF;X4virXweq}_$RQP7CxRseLrM7LsVlqYd(wFqSlR@OxhtVDP?6Avfv#d?~_J+1b) z?XMX85M*)&Cv^Z$TNqexpF+EKtdX6r4zNDK*MR=95Jlt30 zh_5x!BCBDJ2V%ZHiPZhzZo=-?YxK-+# zI)4d|l!LNhX2t1hNm8~w_nSol@d3c$3H7po@=**ckeW@{gCXP8D3r`{|ElE9w~VZ{&NGpy+!67N+_D{762{i z7++xO;eeB#Kfk`ap%E)NHo3LRB~xrIzMIPI9Ao-3mM1t^-uDl%!@c5rOEV^q>21~@ z4fHD|gk>k)yjExD=dnn1k}N7D*wa(sFp%gzD4W<=0zJZW21`Jo1-Sr$7I0Z9XaOvx zK#qFsys#9|L`{EviYw*y1vpI_{H;9Qsp%$K(I=d6*~G#KP;c@`!dta^Z*TNuVd7-Q zCTFp=wSsC~q~j$emohdh#q5heMhG?RJi2=`F+A5!*t+TIL5T_5wl*ZD>GU2#HCBHsUk_Brp`)S)l^7zUMI=b{H*~A)aQ@+ZyGh6OV2{$_mM-LdOm(wIG4T(et}tT?%YX0pqM4Ztvb zfJ~a3Dpxsy#2Bo)IOx(Uy!4kZY)2aV^r;apc)kTAc%G&t8?+E~B$OPTpKra*-xTMF zr%!=beb!huMS|@2Q)l{R&C6CffpE7!5c)1#hfW8pOJF>T7u_>}I@SZG(!PE38=J91 z@}FWqQ+p7r4UFf6#-*7U{xfJ90;tL9kP(Wx_TfgPEOOek9@uOG~N-|?} z694W`9`El*M}tx5v}zoVl~xF1S6cFG1t^#o61wWiq{yTp_F2Sy1Vk zfR94a<{J99XmmAg$}iF9rJfTm%+|`j$<)23l9fA4_$Pt7^Js4!o*^g89J&gUs*rDIFO$``yYWRMl!D>g{oxmvNc~k` z1~Fv9RD?6#ZAAZQ{P2L-^7;CXG_y)fK@_vS$nG{xP!?%Ye&#d(sz9`Y!x}k4Q~mem 
z`G?=sRN?9H;u2Er)Hr2cH%s_la99c87&Y1KiQA%tu4IjiIb|HIokU+0w*+l7z2}bH zfjpFM;gPZ3IqK-zS|4ergP^!$g>##c{99j@S0EFiZ+}VrIs3i(gR)KZ_+!umh{|r5 z@g=3cKE;>udZ=}&uk`uKk1qlO;Y@xx4h-LN);4XX;(hSan#6DUNF>WaH5Oh%zvoLjssO$BUEH$X=!AnuL*bifVJo@3JBUjl&|IR zVNEey)clJ++(+bXkmmu(UXSqHjtkD3 z?)%N@&dZ`{4vhfX>*CO017`>uD0ui^Rb>q(9EnUcEE~E8I6kib15<)w_%{eI9W@8v znO35%dGJ9|X;s};=x*1nGf|9gURvG7p|SiMtPjiC0bB_s)YTGdAboE_RG*}6d9XJO z(PQZA|63+X=$4!Ic>myBv|;!+Q|lj09~z2PUf3DkfVC9;DID1&=r$dC&2o(z21>uMD?m*N48n(LIE> zTr_S3sG@@oqW#TDWZqi_U;p?Ku!*{fLX14%r5}gcBbR$ zu?F2Egt`7j_pZGqSPz$H{$<}^{`VPZ)2%3yRPz3yd>pu@M8ofCU0>HtuMtBz)ok^7;c7 z=zIlqtOOQlEIPn_Lb=*W z9wYY(%E^PnQJfykz{3Z*-e6&Z)1@LK}(+xPF!d==h-z;X}r-XIs{J?`~&<;)rp$zUJl1k91-)Moo>L z6Med5eD3XHb}%dfY+DSHX&7v`Le@ugzojwP9V8zuWO}7W&$n+Fpqowqs9P@Kl@iDb zvBN3SN+Dj+C`(|ljhA_Nz;4#rVACI2aS$+y0F%QYvfED9x%dZ<55GJ|4sEpP`k64; zcCzUyIkrf50#Y?qzY#RafDuclPr+ZOnB`)A&l(|brxhNh-Gw(hee}OeWER+np2811 z#0Eas=b|-efIqxL$8lgkqlOX8UZY#(YDaHI>(KyG1fxPox4QtHm9^gv`S=BNDU}j> zR1IUWugR?c@vot{K;zB^Z4WImL^^mf>DEU;xtKr#QsrH1t9PH$uop)Ln9yC(@(7K^0H`XY@!2P6tu=nwzasr2ItnV1C-ib1NRv)pbQY8&vsiBs=DTx4Jc zEi%@Y>z#gg29U6YX`|_6!@7&;izrMxN{>jx>s1v__;3SlP8=jv4onHQiq@+Su2&r) z9YTiz3m`S|zss#*weEN`vpS!-Uq9mGzl1|e2LBF4MYB^VPH0bvR>24v_o;P{_2x(|aX3)O@WiW^kb1FP$roqciuiKLbMgt^n^mr60i zwhH}C&tk~n=%Zck-|c(!)>>F#AEUH$#Dm(W+_>Y5a|45EMglQ39Q+`Sf^z=OG@6lA zDWN$NZ^eA){nv0j@_+pmvh?!ZJKARzR-*vN2hD)6z?Z-0(-sGG2me%?(B8xUS%o?b zAEEou-waM7QJkE^)gF$HL`ufC9T83&6FUD2!1Z!yO)#zQ=t*;VIy)J&R}6`XH)tZ^ z$$>U^TCxk)O$Xvs*iEf~PGP{+rWS$V;4Y<+LBHv4NIqLq+IN0vXy|W#w3A`RW@cOC z(b?Gv3Ba%z@29c2=ykAzOaJS_0|Oz;ATP9q0kRVyO%@!mIo3EaaWEEY|84KEzwuP9 z-#J$Y>x>(YLQ_2&2DY}cH0I{G7HYu_1kh?n&s`TI`vZ2EdrI%LdJ%1@KH-ax^hm_86$+(X-cna{xF#7^(BL8hhOK$X9^WYGs49*!Q7_CR7 zw^9b@DK^6mT$dSLh!N$GgA0`ugmvA=4c;K;QCt0a!~}GZf)*Z{~)$ zvA7O!mt#@;xcBrXnIy@?l`8TG2-w;R2r%00q~Y9*(q>&=Wu3#>my%gWk z{$)(N*!9Z?yZwcg_paX9e{$a5^wLnN6ovnTU{!{KsYB-UIo^||#AnQy0R8!^(nLYE ztCy#RUf7ENxNe%_n0)W^o=4KVPImMza7%(#QPz4#staCZP)VWl}4DooK~V5cAxSy<3r 
z?fbdgw0sV$tRrG~SFPh>6GKCSj!5F2t6Xtq_HGM5zEoD`(CRE#m}r~T+o^;|>Awz2 zrOU0R5I|?QH*(L$R8DT_&fLt*yLVIRT%5uo_Z6mAW{4Qw4EH)Lbegs&2blPj{JXtD z>o6>Am!4x?zo=q##lbya_6Z!Zh%<1jExO5Q#hwitanOV+m4kOm^n z{(0RQeD|&HA56;B?OzY-scjYgu#!B&^=(QQBTz@5`qn-ZX65# z_5H_}159sdT%p|_^t~AtR%O{(SK+1 zydkGAY?to)tVDKbivh)QB41pLB;FS`eeTK?6b0X|#W+Cu{tJ98sZU@4~jwf(>jBfn;!X?AFr!Lw(g(;PmE#)c~6}}4HEw#0^#l%VhPPpl# z%+3m4_wP@(C+E=cx}R!0R4zhPbw?JnSjD9 zZnA{<`pXwU_N5RsKO`Qa@9_O{T4M9QahzES6YusH<)Csmjkw)$c8$2jA@cUJyzi^f ziJ=qHR#Y<413AIL5>Fc20jwWm_{BLE8}leJz$mM$+z6K$2q5?WDqTm!G;EB2{W?9Z z8!9Kqd4}k(Kxuc45Uy$gGFG`=8Km78Eo`isb)_*GA#YGVXf{@gzNoEXIHIU{aX=lJ z`HWx@)j=Yb_7@p#jT^}O|C&-(_Wv-vljkEO&AS;mH>;zI-RkHauC5p{ST*Kj6$7&` zWVbha181lSy^m{jCv2r5!j$zuL0L{Nbm~)|(9NaQIld78oxu7wt`a_S;lT#uO1G5^(-HEw zs{MHW7$a`)Hub%>$Ow~YDL!SQSG)0F(L*kNi4t5tqwHg{-TlPeRFzRJ+v-&nq!akL!4&E^Iqut%eFpBL?~u03vKG>#|u+uol<$Si$! z=z*)|T@0zZ=;LaOTM#qP7U$xW58dsM>B{hs$zdze!|bwZ^0=I?k85r^-o`~S+?E=T z!&S1ML8H-{No?%V)!`=t3$_OAT)&X<{aahi{m~h?Uj+58O=!;Btv`0x!rzy04SA)#0I&*811b_`3ggYXWL#M8sqtcUGW{bn}3FaFYUAj$A%$Y7{_@>}ub} zgE?ZCBq36jx+Kx;`b(m!+V$Mm2%snG|NWzQXux-seR~Y@G-Z^OBqWr0-&HhZyC0Rh z?`G(_Q;$kXc5?oWe7|zPoh@JFhQ}u0^?H+2B=O0Vs=xQnNiJ~b(oePO&x+W;s3@deBd=g8?>`+|qlo>mkjx@yHxput!P)a3U*^)5qaHc{<} zz=@M3xUJNKf(!JeXt};p;uK|SB;Cw5Cw4sp+hii`JDL_|>k7wRvYzv@;hIy(WFK)5 zwe7EMZ}QmQ#VB7Cr;_9BtA3)UI@}is z_8BC#o4+JCnUplGk71D$gLZS(cLdLT9~xBJ_zr4Uo-i#vM&T~_x|*aluM??>0naX9 z`)OKTdNRcY%DR8_OM&_YU@CXHReV$Z$LWI z7Q=7X7cCaqr3l2WzZ*^&?OiUIK(G03E^Tj5v}qTW=GX~->;S{=Io{l?8cgCUEvS3) z9{&e$4+gm6ar2b>A{xF+TczhPa-sy1J@S&fp|hyj>=Mh-YA&sq;Hzt`d2F~%IYR2t zZ{_;^m**}~icZd5y8bau_mm~j_JzA7*Fb=fN(mhadBOJMbwp+3N^iV?c;hP*A>q## zm@h;I>I-T8;ylK)wb{kQ<6OsTUgi39^NqV2Tr5(>G-#2lG*@`916nNds|nucqCp?$ zbj%J?UTVlDki2E3TrfJJw$j>68D=%_*#PsCNMwG}2G=n%H`~+|4jhaiBpf=K$dvAT zLSH*~m;jm3t%^iDk(2=wLD}nx=n`%)I2AKefPMkiEyO}g@bTQWrjAD=gjD4N(he}~ z{@?=yk>>D?2X1aeDe6jYLc(~RHFIdBqnQDgqiwpudGNHn*JL$H(d+!cJsu;$wx@YE zHsn{;rOxQ3`+OX{A-kJDek?Y?3#a7H>vj$7w!?J<4Ynpx9=NEOo~llPul8H15ueof 
zxJqj;-)nX>C)+jK3-cs0Q|?a@#PRy)1BYt!_E>E1oXXI7O6iNFd43i2%8==sPi@Pj zkh3fjNqO&BJrQ++#S13P0Yb{9t_GP+md$>086kE4ZYPxmuUBiA#JNXQrd>~HjgN;& zR`P`h*v#Rcr^T;3VU6L4bup9@aT6bz#n`Xs4JN)s;HIZOf9~y7&;I7022rcV@*o9x z-;~W8e)mC{R5ji2-_rrVuNBNoNEu&+Ub8se1Cu*Z8maSH1$V*7GsX$hyEHNJoA>|m zyJt)1A4h|!l*Z#;kzeQ4?Fzm{Mb2PUkT2NBV}h@6*&yrB^&kyd-9)dbk$SRhZ8hSI z>rXBeBNq!trZtIuF^Wk1I^q=Jt=g$&@6fGz#M@APqqdEpEE~s4mO{uO_NKLc!@A?; z3XIf>nj$LgZ*Vd9e;tU7(bLiD&KEX9MaezTu6`_LE`7dkp-9DiW*+erB`_E(xXjlab{T8oXoi>X;`O^ob$x~WoC8@Bzz5L}%Kq$h|w)*^A>XsMj zY>h=urO$!Gp;QX6oCIo<&HyVdwc3t7Pv+7 z?WZX$*nP?j`!W{j61Y`VJskXOnxpk;`rNUqmN`tBY6jb*&xqZHr3o~8QMb0($~IKi zdm4BP-gz5W2TMXXeyq-ER|EYv*jvT(kO&kkP-Wi}{_lSq@v$oc-mEA;WnonwEvd0! zi9`PVj$pQ{?k76TzhfSZ{%B!z?~{&IZcwVr`XV3Ie|<47|SEx@WD2`)vmG4Ver*Z8yj0OiI=FT~N{L$Bhgg9lZyOF0KYW=xWp24HEU7msNia4E= zd@0b&*m##_4yOKinCI!elB>+hvlzUf&Xq#T*a8kR1TBpB}I}+uMspYFD!dVoPmN8|YG7lj7|Kf>rIE z9JAR;)l`XDd0yFHZPvW9-M60A7JQEH7J2Fo4aY9S+j$Q;GVewu%b1@F8=#JUWntx} zZeS)Rly@aQ$F4S-%oi~~7`4EHLswafd@+dY6XZd>HKBVDd++@#d}FK>ot zFboA&ZQ3_^pkU$i$y7bmL7fS7ZkYszOGat?R}11YD~Ii2d~!010=CmUmb=^AzLW#g zK21b$Phq>Zv{~}!X}~bSME&!yj-#0>FFh96`2GY%r7$UhkS!>{vsCL`?Xzd=W9Ft2 zuWggpg;^d*8Y%9)(^9Reo@E1W(4ya!D_zP|kKK;tE(}S#8iFFHrD@^ipQj6p0Yuqn z!}sJyYP13quR_qq_iN3O3_q)Y!Xx0$DuJ1A9tbG5rvCaeH`vy&w1`*UC4*O1Z(BAq9!A91XM_Zj3RGD0o?cRU>a?@W^?GC4uxF=`WEFR0La!*M z$)tI2ewzS64%oYSMVGaD)B{a66<507Wl*=f-{nAT=`cMyDtt-h&CyHf=K3cM`=`}J zL_)U={IU=?ZgeTiNE#}=el7Sp-hQoq5hG)+hG>7i&%)BERir#z{apW0`J4aydin^H zI8*5;urRw@(SHwGbVdA!EfgAkA{=eYcfPc@lR{*!u(-&}i^=|sEXvk8zDAG8i)VuY zCroIFf6YubG~>2t3_!3;DsoI~Y7u9Jk*hXZzvy9C^^9@1rcH{jgA1XQ_>8^DEpt{u zf2Xv@*Y+uA34QGAQL06ID(M^POo-;R7(Ia1+fI#Go75&XUA15Pw8Vm$`nGm;RvV1s z$3Gd^glWBjf}hoqg<0(SSbd;W=FJ7;axRWda$HKZh?+lA>*Rj3e+A4#W%lK)#PjEM zQeVHm5Yjo(w!EAq?iU(a2G5Rs0@q)Eg3@=W@aJD0ieiLh$l&#N??|CkrxAzB1dWuP zH7GsthQ?$ATlwHSSFL7Qk&}}q7Hh{EZYbwB)4L5aDr9&iYs0C1%qWp7t8PY11x=uL z+1FakrkjNY6E}5$+jgc5ej9JL*BPtBt>yRX5T~(7j<|S8r@4o=E^5`7M*??{lMVH7 zDp<`oxWE>Ii=W)0C0M5KEf~A~k+x4yE}7p8dQrh$f+oblL8m~G&(awi$7`z;Kky7Hf%~j6R}} 
z2hBf0k2tt$rX1OmWYWNgZ=_5q6B_GlO*$7T_BN1kkqU}F@=a=+rn)l7n#!nY_4<2n zb*^jTU>5bCS{)t8)Vh$JFH9xYCB9;f= zQ<5TSyl+ic7RXMqping?ZjgMjl)!4jia6AP95#qT!zXs623jVSKFw1L)_*?te8iPcw7W*Fu6K_ZX{O+kb)HXdBH7^d9dml|C!!# zn=xMIjEzTo0b%i@y+%e4Vj6`KFRryZe%k~{;#8N3QPRb+iCJuRHqsdUktfl{i9&3YiQ601w8P1fl@IahkRQ?fZ5T0zcHuW_1G@SM47b+ z$dO#yOM5q=Bg5X1-xlWn^@xXB-nnshiKFDnOi)3}tj@s^Kt>~G1eRxojLi*%e>=xK ze}N?;GCS5$e=|ytd2bX!H$y38du67hQu^`f(*0)2Ne6vZpW1t!NieN@%_(K&=H=61 zv(wz==r!jMxH2ss;<@5Dm&@%|6S&&A-o#NH)var&BFdK+RawFOJ7sr0{4X;Bcf8ZB zpo^vF7~t1t;!60u6`PB|z&(YrTe9Lx1lMOh{tZpS-duYyP?T4$Xu5V4)HjL}Cle(z z&2Sg)Nn7FE?XFlARlaa^#ZJJN9KUsi_-_2_3aOG2?Za?U8$%ds7!!gU;DOJRf>~pSpux9+}`Z9pvQVOKGrH$~HoZmLl!>9f_Yywr? z1tA4=YYqFsAYS@P^OpWR?VVd-)6Cgw7*r^b_bRyKavRsZ99KtiQZjSfR~;U61!e~h z+&2gFy|K(df_bwq!`PT#onud0gPwlj^1;65R|Dj`s`BzB#Dw+a$x7kh6tV6;xI6$-W@<0%?tRp%NcJ)pJkvo!li)L@}+ z+2r_+OV$L|b!2swDd5r9MO7|h-!@~j{rZFmLo7C-yB+~WjCi9LaZkLImd2;R7fg~^Ix$Q-oN2kE;V(#vOx2c!u$Yy_qa*5UR7 zxAki;m7KR$P@;Pjd*WTONW_C%s(&a0Pve}tkE>BpN0W!4cbm>qy?$t^hLK$<{)*OnLCoOpsLN6j0s%k!4l+OPw*Z#fACX)XRn`S>J zKj74?f9Z)4!Pu?%VCD;gcbxaWviDv(irpEQ{1(E3e&60x)lqpVaCL~(TX=*k!)zaS z`I+yolI8835y94ZP=fm)Azr}8)wV7g;;B;SK4x*Z zHemVJ4>I{n_{a{5xhNu1DaGp8e#@uNi>)lp>H>YdJSz^mseVM_^KD01uRYIe7?P84 z@%?QgqRK8;EjGh`Jcd~@((G=L=J3`{X***Dd$Gog^1m;ytPr+4R=krrvvCV+475_X z!d_6L)4i088pgY)SPGV+?ABXdze=seLmTDjkabyGg4uDz5`PqX0{e2h!TINdW?7Q; zJ9g`_Lm}hw?mBsvMGZ=4C}M1FtgPH|ygXd;f&DU?;!%yZUZwc_{K&}sd@lgGccr-m zUIhctTe;dn+OeNY>9W}V6qjB*%dU;(jPxGe!9zg^49{?+ zjx*D04q!J44EzQ95Nz#(v>8SJ<^QYX=r1uqm!oWD9XjmQv@!0~1wrv|Eosgzpkxta zgcJV?(iq-j-vSgMO!Jx3@gfi(n>Vu!4)7{jn71kMoaSH0IDg zR|#O;*>jPCOUIyy|Hh4?&>~@OXnRv()Cj=#I(U+yNR#fWf2^i3;%e|6aBRF`TnhT- z-zB~O8%7_xg@5Wzp;7pErOf{?*PGhWGhJ_$fm1h=>qm;EVfkZFiYROrEfGq~0=-qH z6~pL716@7kkQGPzbfo8dz}+@BKZSNi`tk4HAdc>bfG2(brdPVU3JO*Ri4JqtsP#rtFbPg@se@wu_ig`k*Sk4bZ29F&y|1>mww9K5 z<*pBnnYteimtMf@2!KGUh4rwS_jm{r$zJy~iOnglNUiK4{9petb ziTd?}hyic`G){qC12U-fq#_R`FY*E9z)71vw;5?VE((48PeCnx6#lVD|4at@OaMRs 
znGF9-hX1F@@VD>P4c{e(1}%3`H~4@6J)hzhCe)RoxQt>H^bfCrY~>=Jx@9dxmxp_+ zocx?-vuP*)Kda)5;Xy)A{_i&K|I`anRyhMDC14*D#muWj#qc$y72Z>wv$JXPs?2*eU}?JNr66GPF14NNTq@yqIc-Jl$y#02h+7)`0#3DFMcIPx0OqvS z2_my}g@9h~D|HJ<2h_$7ldh0W96RXi8=#fB!1fQ~$^i`CrJg4-9#+&9IX$+vaO1{E zHHJ<^0seM==-W#J0~QtogES1cfbppX(U5hU!D6wCR`pe()amKnoq(=LDWG_;ximlD z+q*iasTtr2cmv$Hx#ayHxE3x;T>^~Fmt_3te20cyrx525+0k9=qqQ?LUQ@%tY4f=( zZDY6Ovy7l>vGy*L%J6=DFvG4kw?r=VL6rusazgG=7@HiQl;g@Mtq)Q#g#+} zinD}~U%gAoYG34F%uj4E5eSwOWnK<9z(b?Rjul$t~z7T_Ipe=V-tVAhOZ zGP{HA@5LR;ine`L#P7@@hVy^cp>g(xpHc`P&yRHxxmUBJA(_?aB+q38&)AbcCfQBfpxBSqpqO zU64=An7%q5qu+RW*TmBW>2@oK4R;GE&)(Cbb3GXwl~_?6?yvATZWS#8O_Q(!dM-$> z)HO_;bhQMfBthPq^J#z^&bJ-e1n>sh4XL|NMBx6QdlBYxYyW8rv$~LAr~CrH(TEeg zaw`=N94blXIYu}Qe$^D6eYF0E3yfZ8bd-pI`gZYRWEkkC_MZM4-+%(7$BkQVm1pP5 zAAe5|qFkAxrTFkTqFs?gx&FlHV7zu8k=DI3ol1||d4%|miWa{cA^^S|9y|&)W9)sS z`Brq2jIgLNm#`7NL1QbD@#R`C2cOl>G?1SS!L}@F!L*TpNq|@3;rorNZEZ5{qT+tL zR)ANb#;zX?EStBZYvGI!^yXl$V~n(+iU)kfF?gpZ>vER@-1q+EJg%C+%Na;n$T8P$ z)Ja$Ho&Wi5qj|aNc`eA#)dH1H2t}H+D_ESEm>ZPgf5Gg`{WKC~?&IaWYCKZSbkvRD ze&PXig#vo^L`5Uh%&M>G!X1UHOZ1RU3g5DOau0@X=9bf0_F(7$m9MpOm7;-uI6cU6 zpT*mX1A2Z>^6>w~-g`$i)wTPgvAk9k5fG%RAP9&ENUthIrAhBW=~6{{jfyB873m$M zm%Q{Eu+V#l00|1x14ze&z?m!I{m#92kF(Er@3`aqvv&r=p@yuiHP>9veCG4~N_o}@ zi7i5uB`cnUe**dxTYrN6-)=^pVvyz?^VnLn%m`aUOz-5-<=9+-zJ2rEKVc|UW_hC8 ztff22%w)->D=(;(% zFU6wibU#~hT7#OJ*=izm3rdQ|?oHC_a_0p84qK==eipW9RG>tT!? z9fc8!V0DfduQ+$ke%x*SSFxE|{&5n2XY(4F!ovO7vgT*(d+~s=FlXnpXPuqjQAR|t zN;9cfTaF8gUOQ@NG%YBdG`wu?`3Juh_K$|erpV!^FFY+5cvYI231%S&jYB&QH`7sC z%D=XT^0oyS38C@iyP6!QK(k?2YAPF@o6}JTwPazvwTCCK9QQ>Z>+&5#*04Z%D=KWS z*yl<4=PDm}b68m%0)Uh49U?(dac7mUh|KocJQNqlb8zOXYl%I*{~0;17$6GH;wY7P$PdO3AbDcD4`Ny>_Z5D8YKkq>$7uapl!?BRpvIqX? zO-(zKJ!eIMa7E1>2?c-yt%8Sp zcdRW^F(Ufx&1eJ%bl~2l!vm`e|A^I1;p1Q~s(5Xt6##s^fo8Oym|sM6XOBs>003?X zL_2!QiGlX3o8d**3riu975U1%%>rA@q{!~Ko$6AaO{rqWM+g|zkJe6bDS);?ug^bsE(rf3t;f+A! 
zX9ejHzcZ|E31gxisO`!60oJl|j2Z1u;8*y*xdD7frK5|>Wc~1*+~0hD70uVPvp6`i zvc%*3#5q1n`*`?(qR~o0obAXZI;62}m}}O+NAX|0#&YT-!_t4u?F%K8&OsNpK>y2n z`Y=d;xxzroOKSwzSczd5Ldp*>l;Mb~kF1Xk`Q2YVa1 zkK39`!kiK_F2aj>8f>JY;?QQ)CPCF7JZKzCe><5Z0a{7b)Dhp@`T~nKa%Ir5y6Yy>U~Doq&+#qCI6QR8R<^`$NxK$ z7x^>)2L+`6zigJs3mf@V|0}2n$fkEEC&h_*Z&%MHBlfH3$c~!v4kWH9td|i8_Zy>} zIg11uY2PVEK<)%KHO6G@itIg8y`SVJnw1rwaeNY{)XLnvC%L%z{=4SpcrqOk9Kwyf zFRW_6pvwgJwD8%B(p{NE{Qj{|%rZO)Q-ku%_is)KHUIssD}jec#IZT`J7JaY7I6Bm zum?N83L%UF>3*b&G)4u0qa10;nciLR034#d7kgOHb50Eeu~znbNm?`gqrjhU-6x5} z)k$AnQaca^SQx`E&6c!-vc^xrCRZe&-RJtRhXY;n6(HP9y)%&_%vhNe`2Eoz0DQm` zy4c@e6ol zUyF-#_@Hxv(Yx>uyA!q-7njG;EzGs|l1FV76o3YmIlG_ZIcZ^VE&J8~%YCGOzR;k@ z&u@P==x)(Vw>ksN^U0mn9+1V0i;Q^jJzM%~SCX(35xz3X2L_EIoScyuR_f?DV1hez z*E}YUYt%ndWXi3Pq=3o#u7?`~Z+gR8cRqzv9d+Smy4F$W1e(hRi(^pRV{w8=)TM|7 zr86&Gpw#@%lNalqacyn5>%Rigr^jhSes++PxCF?cg-;(|?_}m#y;pbztN=7EMUN{_ z?I%l!NcpZzkhkm2L&1RIlO*o9H&))%WIGb?zy>36eMnuwFHeH>;1ixq5skjz`2%}h zmPj}B<-UlJ5I?^VttQyr1NhqCW&?%CUh*133rr2iikBTTP2j^Em9@tT%Iw5l(5sDL zZRr7U#=WPmLMzw8UeViLgr#aT@mWMTXvhidg<)fV|)I3~Xv#VHD7M|MHgSa=D-t+?$v1Kv~BAb`iDuhQRPsim#0qtOk~k z!X6ka%U()TpbI(zd!WBPD)c%c+&GP!znLZASV;u%qF-zN^$TLkMX*3~^aQlz%PyyD zm}%LEDo&k$5czoI>)#x(FSATyFMj{{z@`F8*eBom77qS`SCnI9o28GC%i%u?{(b)C zSBU2NI3rN7dGj2)B?F$38@a^x1K4R4;pA_^p4~zny!e4)&aTFm=@>FE%U( zcwakKIs*K5y)Sgku7I;Z6te2QFMC$zb{I@n6BBi+=15xO9 zUoK}nVhu^K$?{I!Y998!+|qoWg^5v!u;JYOIyDhRMJV`Pa zPFlcn54Ss6hDT(%v$9lE_ATQ8uZ*0rjDs0NPXons+JuB!FIR`6t+hzb2o@>Nk_D2) z;1?MyZW~xhbMtv-21Z-n;R{X7Pyq6W7~ZJ@B927`6Ln~>2tu+H8=Lobnd&5IFt{F< zQ@tzzlBZwz2_AzEG|!KxN-j zBoDU$v~CWx8(baLl+7WHF>`CDXbijL(q$C7^h#F`LUUBu*W@NkOXkY_d zgy5{bAL?>*%q}iUc`GQ?FT+U@*7}i_jh6k{mjb~^_gsnXE7GJ|d53dXUP)oB&` z;RDJ$fgIO;c<+TpMp|_%s|o}02Cs)$?T9-4LRhLZsj};TePtrJe{-#L_YLO?EF>~h z%G+%fTgn-XZ6QAWzm*b$1V9}TS%_(1jYyOPP9|-U&>p7P+3JsOxX=Y8t0vea~=cQp=cNHRafD}9=&eTnT4kqSjoJkaOc&Mymgj1F53sC5_Vb_$L#nAQwY6YO?5 z+!_z72{O_L&l!=6Tq=Ab2_rSxl`QII-aZ1ciX6+^>BWP^CrlfZO6iuP;MXh4$Pshe 
zYQQ%9>qYNI@@{2jnOVa9JSE_lJK+W$&4N(g>OAEjc{Q-}G);FBwB21s+!Xzz3(^jb~TAL&Td6GVui=Kg5F@n za7`tum_;>F)C~u4#Pt#4eLSc~bxLK=ou?%WC!G?gC)tQ}I&5m?L^xJm(>>f+;=}p} z^NN7_lO0P@`wh}fBXaSWX&8R|KX!!iB_g0aiq}jmXwKDLdgz@vDE}mL7yeyT3@9rH z1h`{^M(mjBQO|!CRsuI@cG*ju%dT{t5d#>UabGcu>FsN`zFDZbo1x@qN^uX@n=|p; zBeGief_3v9&$&z`r1Z5vqD6|yupS8Wow%~T&*1*a&0ED@$IdJ=(y@+TEKa11X56OY zk{R~BsoPQ))?Y%bx<*{=@^`)R`PI9i_y~I_cUQ|P;u)UKEU(`XWy-0fWhrsGZ=F6b zWLJIrn%7OGJt4TWGu>dvhOzAGVif6qoV^#vEtEh_cN9?K;0wB2+!keie2sIohDz|T zMANl4OP&;MZ!>y~mPrK7_2(2%ZSh`AtXO;&fjT*J)4lN;($cBBdIpqEJ^i@!> zQi{R)Ngz$ae1*faOmQ~|Bg4UO(HU8Gzw;L?eDPin4ju^9cKcFBfABk)WNw=)zLDZPTTxFlQut3<+WE=~;q+oPY^--xmomxJJ zfOrl65AS~!!tegXF^rbXv`2*r4R5}n495M6E$pXUZrYAS{p!Uh(!agIdVhkxV%KZQ zp4lbjYi?EP262auZW7VZusXKd0Oyyu{HQ{^m}aJO4~Lpv6NC%a3RzaB(A$NHOXV8Z z@XXe}#BZ8VmWpi)q;lhj{QAA@^tF$36WVkbucphCVa0d5oKs55v!*7*4ApoDBebFikRX)N4Td&jFcUp9Z(xp;WaPV2G{zK#hk zs2NBo=JV}2>$uDJNtyK zR2XvF_#Q=vrCM4D=Ob(0ij<^_l^cXpW{;mtm*8dSkbn9VS$%%@DQX8Snx1P+TANYJ zGdnjZ(x<$Zs*ta@aj2JDzH@d~#WS&c^Ma?TlHW|!+L|uB&G20@Hjnv+o$utRgRU+h zQVb2*e7E6JetM$w{e4{ziNW`8 zdzQq=n|1INT{MOtY~?R+bEAW@mqtTs(fMU8YQw&JDelJ;_N%XYj$tCEG`n?jf?M)~ zr9TBplrhHVKEm}mWleg~=ql!?%RECdU)S=!#Dp)``P~8)LR>-rH5z15W1>79I%fvUGY+?#~}c#E5LOq^ysPai$a$f-LZ*pI|Gs2BH$Q z+e1~V zRFDbWTPDa@)L9vpAk@w6uTvK^YJ>T5tlo9!d$pL>pM9WmuyuP|K+%XI%Z|55peNM+ z=B&=g14`?Q-y9DVNiTkyCjm<5^{bE3KHkMXJ2`VdBJvP z+>=~5wToq|_?eg29fQs=>*%RSbHGpvy|qL1<4308C}S^LYg&SCXG?$0YD=Z(M6=P5 zll->uh+Sb7jRFl}6&kp~13DfS!CbcjdKh><=O4u7+AYyH-aKzS#2;$tC+<%#9TX8c18oE*&DS%4~LuEvXk?_Sx0GaoSbQ*{JTVCW}{K-U-jNX58k0HpW+ZMP-cdJ5)ELJa~b&1^%nRoKP|yogjZeR*$zBa zqiu-bp}5z;uyi{~@RJ>;Yq&*XRoftAt{8_o4R6s(aBVn?In1$}dfK+xUA4|pSX>i8 zKQml1qzDMiS0=y-bw&G#cm_f&YC@Z=Nv6Mwodxp)vl zdlGIY0VS&>F$v+^-Tel7jRL~?iho4Zk(h|O#*-aRRXj(S3t)(e{QgN;(GCl z%A0s*g&tKabk~l(_N?l#1GnbntzpA*?U*Uw4&5@FZPlgjEqA1zbhjbXYdhVzrzQAX zBlg`_zzpyuV!oHzULZXauXD0L7Oq|->b+rDDmo!94Og_Khb~;7IX%4~ACgoCv1aKi+ZJtn*O3i=E75o6?z8rZx6;k;s<2}J2vRcc{qEYb3NeH>32%jo3 z!X7sSE2>;UXW7}}*;>t4e1t!4&hc-tXMRTICU`*YC~e2$Wq9XM 
zC1lr5(JLpPi66T#{*+?iQ%a3fZ{tfPr}?Yjs9h;OHS)xyyv_1>aLWQ*RP^M%JWO=E zoy@ZnyC;6uzFeVy!(!=t;whhVqx?aTsM+?Esg@y^rgJ8@{{%8Rh>4faqyg@Td_lzb zYZzS&pKe|;I_2nAW1piIml06K`*|bxY6y=)i_^W*tD#d^iQ)U&A81f#{8N=vt2~i65ZrE z);wkbhbYvqVdCkC;27J(hh*TR>1ti+-@m&hntF029`CNyHXIOZS1%#No%m~gP6;sk z5Adi0aHC|Q!id6O<%{_)n^P71;LZ)&Klh65x=g-vS1WqRX^Nzwt1CGNcWC~!m2Z*$ z1}*E%+>wyPQam=@JXQL4)v2T;7)l!Lc*B>&mB+d4La={OQ@Yv>tiML=9p}03pFj5m zTZZ$Bn>ImTMgwS-saF%r`R5fsKFgc-etbX)%LRau)N>Xq8{@U^J1Z;aEiO?S4ZA<& zK%8?iyDbv%`oQAW{b8q%lj3O#JDif-f}ulQ+ht?)B?x>{qw722sVy0D{pH4#6aW0> zhL$pal%c2XbFZe|!E0~E#6Mjc-y@gw{FukVApBfEF$aw1*ICtDEbFjbjfl*w?xU12{j(ST9!|dBHljGO@g)o<0LWrAWR7mlSosD5X^6fh9(|qIpnHffrMdVRKj`J*r-&$LH{DcM84>ws_ zleunFb)(%U=f;CAm(iu(I|KNkbrIPL?t!zJf0xHpZ(ZNsT<3?TFjvK*K>1J2oYAo^ zSB<^IyP_YJHJPC(qxxCQ;H>N_AbX{LTIc-IEN7oae1-aE$ngkbGuYb*Y?C zu;<3|ioE=IFRTszSy$OguW;<=x9)Re?nhjEztb?5L6HP?y&^GZFc!tcLR*_~Mrcvj zkk+IitfwdsR9Ev=RFb5ngtS{;G)3#|(+h>O_mHZ}!nYn!QXc%`{Qmv0{}(YL^-#p6 z$fqm^Q<^=x-m!s3VKpYAVqTkGd4>>7(;Tok`#`|g)d#E)5+BB_d0{9A2=tsP z@=1nv!B5%d`N}yJO=T+`$wuleF>%Uk=4$&(vEK?(K3#U9w!yuD7jqYLOvACGwGcju zFbkVJ>>6S;=2@gD3YnZf)SoJ98nm0-9?938n{6LFTj1|kt}PdxQKXD@PS@qqEY8vs z9;Elx@?8?ED2n}?FYA=u4(+wT%S3xyWI|X%3r!*>#emr$DuT9xQK-t7u!0${On}!f zmM6Q;_T}G>Z4c!+4<=nW`+#oCPU?O`s9x~hQ?^G*CJB$Y1c`EigdVJ)xApvTWrBkc z<^o;KlylFx>Xe3#vg|}#Kqu6GUrPB_AoJ<+#dBMtlYU6Q9&_b0 zHlp|2oZtHc5L#0e3Dkz`6BGU?ot+t-uxV*+ZS$Gqg)7H~xT*Q9E1R7gbc}6w?6i>I zQWr*i5zb5xw}Zv&!IRW9g@4Tthb;p9i7eMbN+Qi9C-*n)O3|5H^@*3W=ba3&D@!ex ztW6sQ+9@mhPNPhdN7dL`0dfS2&uhY$W7FLvlYCi<*tc5O|}X>OoqF)tNZ&qtMv)re$`Phti4_z3#&K|o|i9Grumx$ZL4|o z^}^|m{)0L6#10n>l8<<)(VmhgpA`Ew?)~e>mg1#V>=o5#m(k*^P68?GT)w4H%C_9H zkgk!T+ZinTo!psjIu^)JL;tf$kJDmB#;rlZUxD&Ii=xrzEBG8;_+?haOjOm{mHMty zss0W=R?*+<9iAxt^cUbk#G7}v0u*K0X=Q%^)5_Xw%3Kv9NR$lB86yY&dy=}3_RK2 z^{9~ghJ;^{<>mCDA@$#`(8<<r>OWB2{7?eW6(A&l$`zj3+57o=&GWRPWonNhwDw-JL^c%&Cb@v&l8vmn)2&EN|DDocanxSpt)}>z53@3#EA>oN;YMmB(^?Ayw*mf3Q}>gX|Be z$zab&7fa(yw&kD-}jRsk+T&G+IlHz&mx1Iqze zVQVRhgz3PLtVWA!fuF$aswo8Y9qRPT$vHYA!5C7TKIU8;SHT^CLD#TyK>2%&J^h 
zz6_GA=74uonSoMTIaGN9qEZw|&l}~Dfl4mwQS7T?PDTbM6L%X76yjK}Fd70u4&i;h ztn9o;W!&k4EfFc=u6rKt!+J*VZxwl7j*c!enLPF3etCNerjo#GB7L;qkd4A(ip`7m zgnpba;<-bd8+%+mDZ#Ly(RCKdgON}5x~hD8;``5t7J646&A5zC7t6`SZ@2|oxteEW z?ArQ%{I}aWe+WUE*7kf1Z{Ymf1F;+>yK#pN7eGJ3w}5Qk#oj}mdw1U6O}vy|An-^Dz3l4j_2TZ#+o9Ti{UQWAPJFFyViL;d&of}ybK~>vIHk)@ z4X%3t%l+F0Y_=Xp-Ibn7r|TjELzq4Qt2qe0B7Q4+Nw*`=FLhV2y|ytSOfuRtsV0h2 zO#6CXj6_!_`loiX%?>x-R_5q3YHulw}*Cstji zM8hoRhgk(H(7rlMr$nDT^#Y zTZ{pO7Ci7eQ%b{EOh@@T`)^`PJfEPRiQjy-w3E{vj@|h}R5{p13q&TkWY@=}Jw2g` z*txBh*$`1RFGF>snq16n^;WgZC-*~i2rKv@fhj@m`gz9Xwi8TdVe$#nfW|4GlufV_ zXS7AyF&)p-;CRvZ@JCgrr^%mS{Vgu);xVI~>mUPSa`IEpa-04UIto%aeGHVs=`>AN zlMyXYe|er2+A>}}A=GZP3!p-|hJ%bD17kS$qb{1&3!U}#uwkzcHv^PRJvHhb1rkM_ zaHBy$)iKETGx~02sg;#wcXC({s&aeSZ<)5kk(P}2HgxaMq^EhivNXc-R(x{@Y#KRl z^)=B9ft6yPJ^W(6mkH6v(*pm-9jmYBF_*L}HLS-}Do>HFOrP$6Ns+ysqj6uv!uB?u zjI+(+=E5GlQZLSi*!R}5<$gPEE2-e!{*t!a3KNUp*zsD5k{3_w@5a%Od@zr%*^nAV z5D^#+Q94*^3TQ(2RtWercn)A)6%)&>)U0c%>(jL3ekS9}EYOo?y>(S*)Jxr#8CbC8 z`CDr)Cqk!6oZ=WFRnFCvpO@L1!iu+|Ofko_{RNAueqH_rVn3b`7~)6U*F0tKNQT_m zM4a5rxA+vqX*mwWfpRp19w`F7^5nq*jdymfqH0W>F7W0ZA&Ue?qIMh1=L^*wa~ru5VGv1Hu zpmgGM+!>E%_FKfDw2^u#S+_#(if`Wj)HkVUw3#2)mfMrklaeT2hhIecZ7t?&8w-pe zqjhJee3hA5VuU-CEiu06QHxS7#U)2fgv6(XT%Zj>#-4Rg@;kh{k<3(@D^=v7oH4f5 ztA=_Mclth*ALl!SD*ei&qh&1=a_w-bbF2{Xbr=2EDR^qqpE!8nehXQ3AEEf{h;=Y% zGbYe*zG*t9F{H|>qhOR3Mm;9#v}~S!Bu``Zqo>iukdPINTRqvS%CwPL<5d+DZ?$nwD)@`N>Ob1i(t;~)pZ3?P{ZHs=i6FR)CYoHlXk$Iu^c>ak z^`ta-@UQ;5eT@$z78aqI+twz6jZF+y%!_}{sj|>G+kQi^@9DOAmF}%#I9lJP;ucPM%Of^}Ep9o!!Q<0M8O*qXPxf|H`Fgu?(LKspW%t{@q;`T=qV~xg?{YFt|Yz_QaV~gr%_hbh)>{ z(JN3?yIWP`Qns45%ALn>ak_?$tYg+Xf6f|nnOy7$wwFdcMz(W#=LyIlX4?0B;0cC1 zS>j_Cj@k3i>sT_xba7!>579D9dNqEFv>isx;kkNw7z0ylfnkk;9&67v0JVQx<8Qx9|p4pm)IR)Td%$^L|i0o4tOf!zewZBU5|p1&E~Wo3va;=De5q18cGi zJM(ovb!R{HH6=u_tViiKx0rZlhPwyZSBdsg&pEj|y7nPvwZ8{rapfc3i2&Ro-g2os zF)p!Fg1W#P8l)GH9Vd_oAqrJ2b}_%2Brh&|MKA1;9nJ9@&9qC>Ho-(G#veS+8#ccD zwu&G1{Xv*gBOc6xe6L{2_ByQhG^ihNzn2Q@sg0#N7CwX>xO9w~ufgZXmkz#`)xM9X 
z9JOmhXAI#xQW+Ko8kXY8D?HSZj}AWSfx%+st49=bpkt*ILqP(xE&W-{O9R zYd(U7BLY?6O!R7$$4prl?h@`azVRQb_uk~5xgUv!7j5(^BX;tWLR>YB5SfwF`3`x) z3<*F!BXQN7mCspD1rb;pwHt?F$vd;%`Ox3n?q`Q?UgZOz*Bu1QXcwnrgNS!4S`g3^nAg7S1NJ@ zwth}^7S}WW0?nqo1QuKbn0D*kV}rKGQ>FiNl?K@cbga#GmS=#xmA-X`BRl5pZ27j={&n%MeR*S_W^i)9e?%nxTn36o`eUI zEQANOG0N%ZiX+NaK$qt1cnIi95zKb0JTR84!45mj88hhZN{6d`A3uBmE_s*R0F9{d zj-la=ir;!0{9oclT9;~NTXX5QbSTZB8c?aJw{_u77DK;@XwCh8vo+xcGEkw(SGA1O z#;>tyR`r%ov$us|Nj0&k5v33Kst72iB_n{IgWmfM{@QK{v6i;rW%i0V zoG_EGoydL5!EBGEGiY9J@9n{d5mbg+OVt`Wb|`hJVsxGd<3JijO^Tztn&)Phwz75G z8Y}>UD+2bd*b;gq^%8r)BQq@?=}CH5u5boNhlbXrW2J^#aq3+KbsH^YIudSgFUKT; z{p7Ou^IrS`fLf5sI)#%o&6i<|75`CkLZwW-j4D;ZN#N3b3eT8#)4zmOZKsdwEjdr! z5kllT^^g7Dz`1<+yEYL(w-gk*+i=2I8~**p{4B2wdzJ}Je^5Mk2SRO zD=sF3NV2yyoPd4_HXgHMrWEo7J$u5drXd1woFKZkHp&3G8C=*-!SU8k>TTVFyJbEZ z!QUaMaIx@I+-g06FxfzGfaA)y1Rln1$6WrUl6_J&`}$_|rgyAM#j=ZmMv9-*R62FB zl#A7)9RXi?%<_UygHnUZa{b{(xmLLU83NuABgJNmdXgyR#!KB6=HBKPk1oyjD96n# zTaKL8=3-rFT{&C$Dq*xHz5N;duNmAht$Nzjc#Sxv#^6BFTBQ^f%* z1@FG%fY_~>z*=2j8ZJ5!^LE-T0Oyuq!1NAJ!zvVo<}*zQlDbxr3AKM>qNmnI`_m)3 znn%XMglB`c8~e=Mhab*Iv9*C`jHwe;o9qgEha0j?rjDeJ1PKQVAAMXaN)>iWX>Opm zkqd~PL&UVCkp?blu^w!f{=U#*z}|>(Dq_}1AZtUc}Z7? zN2WJUl`>IPY2NtR_U#3`ad4jAVs8JkxPw?9C4 zZQX7~J=&ga?n>H{m}N58Uf#0U4)~N2=2{ThOC%4@ul)u+oNb|IK3z}bFx#K-N zu-UJPRK3QmUlFIZKdOp&EUJ?!hTTW8@3ry=gnjG4af}wD%szc(Wf6~U_&9BQi|6(qKRQPdRj>!v%axz&r zNH?Ui{uV;c^&b*i`Kg{t_IuTnJ}OEVm;!*5uf2`VJ%d_|G9={T45o0G%*Pxcp-|i7 zNqE9rei4r|xaNS~^9A#sze?Z;U(>0r5eVx>h7g*1mNRDbKU|Uu2j7+yA{f4VIci$M zvpj5U0~B}$lSLpYd5YJfbg>DBdb^fo{EI@(Jpge`YJ%yYK$*RR=b>3T8RaxZ}C>pfTp z=XX=v!R=@&p#q|GzVy4)*GvXEmeA3Vi}!Bio)fr=-{mstXKc-(Y9Mbl?Bf&b752?Q2{yX>m~mnrM{T3878YUXxo`=VMiib zC4jHL133+Wm0=7)Mmn3Hv|__~kl{9Dbn~Q((h*=daO&Y5%$Zb~g{P?o?kfdF*lNvX z+ve+?Ra87IPNz$yz2sl&;1ynGH5RHAYN{*#d0W-r2d266w#fCX%h&I^0)Ayx;$>QW9TEmcdr1F^<&zsl>~!Ak|KQN? 
ztf&AtioBoR&!NdVMP+pymJ7HlJNqSDT~@dDe<=1CfkF$0W9$ZaK3uecBlVA16zuYS znVF?r@SSo=O?J!8IhPS+rk7k<@h{cPcCC51WMc;u`ItT6kz>7z2EQp00EdUuVPHP;e}OX8lPyXHF(JE(AP*O z(QBck1Q?3E%rbt9gKN`k4<>pTjwtTj1^DL~`GBY@QD!S@KwUhuRyarJB)xt}kQVZI z!*E&If?HjCX>A&)#LGbXN>4g2P5RrRF9M_olcxF_s*_g(1{&OojFqH$dl&ej0373J zhT>e0j2BJJ)#?4#QxeTmKx-IK80OD89TAucQV-xK2s&g0H(2r+zRhX6hs5PZz~)fiMAs5uflqL>rWEYjG|hTnSoJN4%N%SoUt4mHdyc9b^%cy>~ATjrc41o z9caNkoVX(-;=Z*wVPuro3AYRH=;v2|qHJIv59GH%1?T#;qDPHvMT)S`j_>oq`U_TI z*hA}cqGY_hk@D4%fAH_{>w~9yYfp%c?UD-gfS}K)*=pe*N=AAnoRc8Y+F+mPTqmZ3 z?@L}AJVllafAB!dCiF;v{rKCr74{+CH%c{IAxaetMpH{><^#uQQ0D?3*ANH4WaJ8j zg_gvS2uELJ=-ic8NOyD}-YNS}rsO#DY!(n$Qb<0~`_|Mq0CJ7hh6v78`LZDz@Bch>{V z!Q4Te`zr9F{uimoSZV4v_WX?2*d^#u;=aQr9#3sa;sIW*US z`QG5LUKwf2W&77@@El%%A=Q3JPy#rOs1s=%Bne6ifq%ValNcW#8yjy^!#hMzCfC|% zKs*)jO8v+AbMD$$6`AX;QfaYqEz1*0oTA zSLES+nP(xNJH05CeP~c}fl3Ho9zC$+`)=!oQh56hcbik>elaC}jb^c>e=@|If3=>6@A#6SvWakAqc3?h~ zk>?6*@>+VBs~jbp_#r#&J=y93)~c-t<svOzj&&>g6>i_XxCZHohN<-wQ z%w!)Sf3u&RB%An;j;AbRu&u~`2jUOkepAC;~OK4I-eV9=ztSA6~=wwXs;;kTpKUADM$WJ_J|MD#?@cpG&tk ze8aHuEriM^6b}f4G^CG8kw5x{qse@Lf6Ybm;)j;Hg*;L&*iMcIF!7It82W}e6)n=J zJZ}EqQ4IpUBX?#VlB(?4D4~(|B~jaWk!aQA5zvJ!7qxUgUyuvl!G}LVs@6{5!jb2V`gM2jD z4T$mG%elXhLjD;Z2G%yi!~YV?gxMvZ{`YJ?$vRuTb#(CEhJ7R*e6)NC+;#V55QU)c z`tk$FZLduOvokDlbD$RRVnKyP1(IQZ((&{A!OA44?@AGuOw`mw$dT&F^TGn3y|wms zvVjj2DPD5?ACNxVL*OuK*2Z&5BEOaSDeO%Q&kIUiZVS*C{p<9}N6KdyroJ-z6UW-^ z8ufyfqWq(j6%uYt)G41*elw%8`1#^mJ5`h{Pe<-?bZkv%>eaN{ET>IZ8G@|)x&})o zFO8{Sn^tMXmbzp}xL!V|T4vRkF1D&ya3HfgcN#d=gEwn=no(;+^Ut8@SpTP;XDg(kg%}KUti86rQ<`Z9liaB{fxgHQ*!-j7KoQCO)m~HiU3van1;Qn5Ra_2^#LjyF<*RUjA}}F zPz|tgHQ;JCa0>+vtTuvz7L{k)sZto$JZVzs_{G~D=yE%f#i;O3v#_C`45yYar8O59 z2L%-u;~$#^&M`WIB1|tSk@aeH7+q@&G955TDhQ&t522sQPQlg_7qa;Oh;=tf^adcGSq%{c6JV6fgL~@w>JUG zx8}KgeNR%(j>swBZDh(2*ZuHV7h;H;Bb%Fxj4m@StseA6;0e6_Co!haf46>6ZCY8l z-gHoQEOn>q#68>Pt;q=WnPQv{I8_uWt9nGOw00?1;Np>%g#}9K*yLNaLtr96uVs?K z_4WKAxObrrPl?6NvzbR(jQ-TAF8A*?_}^^2LQCLZ1Y%f($e&Bn&5 zN)sZiV9pzz15V{LkO|u-Zf%u-r57f 
zkU-e52d$8rgp7}#58>-qHpT6W0iSi{Gn)efKp?r)PBLo+N4rBZk-Ati@>doCf1U-m z#8W%;$?>14MA}dRcjLF9cLH-M{`u1?`7+C`TG~%s+(Jd5T3uRW;6~b;flY@HG^MVb z_AVTuS5N>RujZd|6`J9vga8W!FrcHz3qH`S$UJ5N*{gai=4K}YZ6**GQM=pI(?EP) z8VektA7@|s9A5KsX$f%VkuB3>V{<9U!WnxR(a}j6QPDe_or3r8Q+<^LE#TZzD+%=M zHDQs;N)ZttK12ZX$!z+oe^?k!dX{KXO!xhJOZYj;2vS@Xkd_w5+bgm&%7$hL3u@+f zb>fiaRIidqVKA(vgurL)-xmlmV$_`zKO@K2;;a_1dSuG3?Z*LFlV6D2o z$RZBD$QosMA@6O#TkI-W-@XlJ1_r>kox_!aV=2UAQD^DkJ^;8qmBO_azlGGCJcc1W ziyA^HB6jGd{OY_zg-H=lu=#~#=|~1^p4<7V6AXWC<=sA0av+SB!c(&RT6M&kg$C^I zSo+v5{|XGOuvpc?NpEzuhFxlIaPC~MS2si~%*AP-<3;cR7b-B6pK<6wJi%mZm{C8Ga zRtfiQd)^V{1I77>8LUXw<<5En+$4LgkFxKWv-8G=v$J*;N7acF8CUYpWLzQmx*`r< zcleofv4n`K-7=zQMmsnVz#0O`qkJGw*~~S(x3Y677(tj&MrYA1gOl9xLxwR&2srEHO6| z6p*vpqSiOJuNyveLJa7JjiW<}0fLJbwq>6)>XD{XoZ*(o3s0PkU_vLI2@Xoc$q@kk zXgnbS{lzFTZaJKaZn6cMVrd(}S|#X|s_1AGVkV?MDTz3r0o3#Sx>dClFR+6JKmI;C zU(>Co2IuShLPl8yMOYcsB3hZ^2vIzAeDLc2Dpifp_8egvq&};Y!+tQP-ieC9LVOIi z1bOP{AM!r4%_;Tf-h6~-v=L5QAPD=G>nP264H+vUDk}{Uo27Zb!>(7Iuq1rF(VF+| zMxYx|ec9Y(bo?bRT#9%=9kHb-vn@V;Z0l8>2Qn_M5*r_fHt9rR6ziC4Y|UpDq6t6& z^8Hb0cYi~FLYePHtymUnoIi|ib^o8`mcObhij|fStQOZ8jm~YskMB6;OL1HrlVYaT zK9gQqN*MBS;evCr7&1O&S5uZ~`t&qW9{sSOS^IuTl>F30t;EUL;D%ISxwim{z zi^Nv-{R!&l4t+$F<;!ijHRMRGn_Q(KByCoT`0`k$kHyMPOZSm8|JUY}xBM%nzb<)} zURCOoJtl=k#4x34sCt$ujOY% z;i6YxZ_K0AFW3jhBhU!P;Z>zIdQ;(Dda1`Um(tZ#Q~VY(2eMQyqQ!}FFwW80;*#^avK?*`SPTb2p2*KGW%zsFuY zOkGyw-Nvpsa_3yGjl$M@&ct2|Cfe8|Ds0Bs;8xxQkp?u@VNdTDD!nJ9&S1L5C3>Ej zF(Hc=O&(p%Ddrk#C|R(5sYR`l^I&#!j3 z(X(0Xd=)YMChf>A<&_$@@!9El+CFtdRb5XzJ(oApJVMxb&jYoku;^GjcjGf}9Ai;`q&3W39W_h?OUfpN5!t9rDxDgbE z9P<5rp(IOkfpEWqb;>!9*)01(ESo|9N~^AGxZm1!hv{)YIlcdRq1sPrMmoE_c& zEal~G>%%G8OE(o^A)&BmgSKT<{8}dRbZ5Zp?c9>+=yeBfP_QS)%EY0X;;%qrAtp&ezTVaD4Ti z7scz;96FvLkDBQjEKbBuj8@Fxw_Wxj_OU*DX69;0sOq!13d%>KqRu13ldOZCOm6n-2fCY~yt*#&PA80(NG)W}GP-)up zscj`<`3HQk=1i>}?cY_g=-`0UFKl@9j$PSdewAMh3*QV)F~k;b@Bf6I+n4sW05E8f zOHR0@6PdKp7Z4e;Di*MhL{Y>}|M|v9N|3yB>rs=DIhP>=UJfv~hPJTpU^?bsKR@m4 z++#I?8MI}+nIj8(-5lxDJZ#_u 
z><15J7u&e(aCxWsqTrfLS2pRAf@gOu6X_p<2|?ty<+sV{>Tf&O%u}DTmriS-WcUi3 zp2;n?J*;?y>;14EwJt2_6R>*GHyBxr;iTRtgQRGSXIJ+ zGtjDA$_@~_LWb)H8^0pu@JR1gy~k<3?5?$R6Hu zc%H-Js9g;AG%L#}!uI}sXXpFbb7dul8mdzHTJfL9l(%i8(Y9^#!G;fXG&Lp4iioJp zpf3iLzP=XZG-Q6;%IbSva8#6c{X$-H)T60JBvxHRwG72xZXa=r2Bvi06IJ6rjCU2Q zLgpD%BlA!~SlR4`qQ||{jr`<_AcV)FZh@;mKs;f z;1kiblpq54Zz={TyVbL6KfmM}DsDb-bhNN=6eY><^;5=9HFzw~G`Jx^5dKXjC^=wT zuHcW3nlMlnAw0w3CMJaD)z4m-XYXwmM+7XjHpjg!xcf>-SYA1?ZP%gF;$tXdTYhJT zw_1jNllca)x(8eXRe!`=s;8plqHJ}EULxnMrBo@~aVMF><2SBG9ajOveB>lA_B0w? zir8O5(p9*mVsET+1cfdjO&#l0|5E`e!@?h}N$J0TbGI5>il4RZM>piNv!hGmeC6>qW>|=JNel~-vEc3JHhTl2c7M!j)ZRp*qtZJp-ptf1DtIk`50>M9Ye!xlo&B)( zxn=j&nRG(#Vph+AMD}@~t@WkJWVAv++q2`kyTXv6Yhh=LzIyLDswz7u6ka~VWX^&T@QiL$1z4o%UW`<5b-Fbfl{E<3aVI zUb<8)J>rdo{;rggTT#;#La6Kab0^${btbY(b95yZZqh!M+gXwgk;}P_4B?!W!QfR? z*V%)j4ys30Aq%zRcZP*JWt?YTb(Z(-TZDgU+!@4p*;b22o|I(tv5i3&^)lc{N=v(p z4vmD}(bh0g_C9Mb8FVnp8ptB#>!hu(kEf@NO2n~F3>#j%RU#H9TAW#>R*;lS)qUS~ zC&;}BI7e5L1FeICu7_=4oG4Wn^U@Hv^B)?kf;d!d?6ZYD!Pk#w!XO247tK)Pf#Sve zjcex>w#Guo8TCLi9%~P1I(d1A3b=krGvc6t>7;Pl&+DO|x`rvz!YC~v>ZyFG?rufu z;IfCtJ^WI*z-?FakooZ(BV=-VE^jjXX#NdMnpt>my}pp|A=J>&_{5Z9xWirfv=sZW zM{QX{%q>WPGJL)U(tXCkrt|X-K68 zGc;wVrFSnI@*YCdr^cG1rL~V6Vh6eoh4=u_RJpTMqs)x89#kG>$f4B4Gd6o3?y3>0 z=jOJYi@)?SN&C(v)vLC8?o6)0`De~C5@>#Dl9#wG_9!~fqRBXr7Q!7PyK9^;vCPge zdLaDt&XGC&jTpV0z609CGu-y9V9m{@AYpj5L9Tgq?CKtgr<*J%!5&Ldz3C_enP-WtnZwy?j)ba{WwURRE}r2bYv9vI9f zy)U!)xeRkQ(kCpgmAI=a^{ejlgCA7ry(xoKU@iWlvDV` z%a+sl4~EESu)GR_MN`H#RV{Y56p;+0ML`q(LnURD^+^YZ++0SjA4j&|!RVcZU-dLJ zU-dqJg@Z^v-ST?Tk>^nOnVj*NS6<<7nBfUrHp=oL)+*G9$}NzRZgzIjR&nljadvibaaKJ- zTAYw3V*Dyc^Fjy_(@*fBpPt3U0JL`lnL(HLDNR72zZ%XlnJ?|PFU2^Gz2Pb~ZE|)o z2}#K9-F@-*eez2!9e1L{LX~Ijo^v*lVi=Zs0Nqubp9IpPwE>U>>}?;c_JSw{p`S zeqQcsH*3cadmfJR6OpODx=~e?D?GFu*L@#skLcYFd&G~f?OJazsma>QxtGtYpS_^% z-^m&?jV~9I!h(m%6Pf`>jFYCDVnUA9Wp<=lFMdpgd!jLSLwjU!dU+s$EkZ#uL z9xjzsP3!QHgAX6it?`~vK+fHanXEF@dtr~coy&+W^j7TXliZUdxF!0Y%=f)+9bxb@ zU*RhuTn)2FAlHz@i0e@y2lq_fMU8QlAdX`$qb6o^e#5Cht`b}XZW!j)Y}n`ZZKA}) 
z#bI%Ukt-(pPKAu9oX~IkOic^Z8d9Gexd$|sn+ny|222&U$sorTU>1M8Ex_m`e!7)} z_t!T_IYscgch6`2W?@-hNc$uHQpIMI6n7*gEz-tze9|WNE+Mz=`O%PNcZUM7;!Bk~ zY`A{w)%$+H?Km3cnC$qe1{LY;scJwPWw_(aSpA+#$aWMX{Xq2`2s&<_cwY-+IWBWO zd>CU$h9{bhNEKLEjGr`gm7sN3px+RQb3NM9ee@d)dE2Ca+T?hRggmVn04=)COy%Oc z=h^M1tbV_US26yhn<*FYQLAk*hZ8FaW6>h2c0zGsB){*eHx87K`~I!sLV%7#JLJ(v zrYQpOr%}Bq$QAR49Oi3d9GhKTU|f<8VHqhD#}|Ik5}oaq~y zaNveTlmTO93$MB~4?C1LhfFM>Gw6Aw?ugsClf|C0hSc&Jv4{YI2TEw$K3D$*D{uaG zrV%0SX4!)L5iPwz50}o!Bzg-Zaq;o-idi{%+Jbs8Q-WeT!-8>QFEW7b; z57EeM+bq{_#`>JUeZz<68oG=$$ga*r-jE$Sn08XZHAHj*qiD_BEgbfNP|Z+b^Ni{n zrO2SSLpYaeFG2q3qM+=F+ehoZ6Itd+uHdg}$uOpKu;fgvx6;tFv-3Jo0!uk8&w(<* zdZ`E^R)sT+lJsQ1r$th9dIeQ=m$ed3X(y(ZioxL7siXQnAtmn{_;En-<>agxn4s z4sVKgOV0COooXC=*H{sN&&vK`YZvuEOIE?!VvpRf?ww2Q2nRpf0TeBx#BgxeHMoWp z>PN|p|7^i-&#+a0Uo$E}RB-vg&Gf&_17fTo;-M3z5 zf3xt|VD=juinvHdTxx(SB_Zs7Kl1tSW4y^AG6)bW+PT?POb+Ru9K1hHGe0wC1QRDz zt;A{PdDF7q1GrxI>QD?Xx=x z&rvi{2frN=ATL9HU*TRNWLlpduhr#U;Hd7qa<~}2JiNRNK%(SFFCVYQ7GH!bDiqHs zR*p6C5aUABZZ77hyQ*Hw-)@F!CEqu~vRRbvQ;A*q5Lc69yIOVQX{OD^Eo79uIGHo~ z{oswdEs8XWr{UIQ)o8dHQ99NpzZdvXPEvbvB==m4TKZT@GzonYw)B0)U5<~2>HlO= za%$tsaI#8m97kukK<=B>GbZT`tNC(5T41$u>*RWMbw0Ei1wdJs;2)N6Q6hH|HWsMp z)dNI->|B}08>q>sL%{da&;XI1#<#K>HyYw1D%RRMVQ4+IPr~|8cl3%xTt!U)sN>SL z1sERt=T$X9k2)gKbjBxID-U#O!AOGMBXyD78!P41;?mur8lS8j^y+f0GtpzcMbv?K z0Zu17OqTPUp)wycvw$=)rOts=KQlDsD3(CG9dJ?dXrZEyz{SMaIV~3u);wr?*Op^N zWU-O3K+*dKXq;fk?%c4qS!(N5aPdX%E9T66Pf}!x63?KcpHo3$l3ZufWn|)X^uaXG z@?Xiphl$9xxo3e`F6;F_kxp{t1mt*6WGIcy?l4dGO>d!Q19N&VH>WEyX18aWu2_RilBw4mAuKF zqY6bi4-l!4`DUyiA$8(xHINN=^o+?tZFP6L*vtFJB_1gkD%p?lM4HZ1O-0WM3RoU5 zTI$6VMeiiHIKTf|IjO2z6k~JTP{m$UcDJ@c!%0(peoCssp{T_3ZKygMKSqL2= zD;vp2(p+4Dt1m9k0E_5U{vk%XaNhrB1=8X-pHlt@;SEb>=fF9*mQ%of726N#f9lpY; zr^_VvR281_8AHS+`z7pzmMea8#rB*@Xp8X6^3Lc`X^6^|docUL1m z%s`0*1MO1nURAafwPpKW)h_nZgQ+=0JpS=cZf?kdt&C4R+a$hnw7hLR-azbXX@6gm z0)*;pw=Y!sIiH-V0@~S29mFMFV1FFVyocwT5!EvT;RxTfb}tt(6qxV*O*zd zdj+9yI(guy3_eXu(rnw(M%~%0h&lFl@zn(Ru|Y-HAZ`7v_6xTc 
z^1RuMXx5u|AOxgj5c6m6h&%(to^2>opW&h(XTZWY5mz?@lV^aCOGYXsTer{EmWncxU#)p{N|q|eDr;^)$x zaz95geiz6HU4bohUtQhZU0ofNhcLdb;k}quPK+<0ZcmzFA+e7CwrMV&7~0M(dYfQYg@JMiQ2GiTO1;@k9)rI9OFcDJgzy$MA@fef}^U4|hbqe!#|8?mrYwJO$?!E%;FuU+bMYzev+Bl=CM@c_H zR$2-)2aHxbICzny2#cL*^YdzvP0ldJ-*nuD>Tpk-&HY|e>|7=)@9sXg4f0zMc1jfhz5Hoy-iXOoItJ<4k z^kyt(b8#;$EDZDW8Y5T(4&QL5@%_=3zS$0XbLdNoaZs@{0D3lN4&Y^+_OG{1|6`>C zYMuU*?tw^SINaPE4xs)0X@nYY3^Oc-nUCb-JE0J;IcWR*d9pucjr{K2c)glhdlE0N zykA8D>F2k$k}E^&R3d3^b8D5{5{CfkgY^mUkCXqpwVeBO*VDam;=J-Qm%W$m$c7RA zk^Shk2ey;nzHFZ0Jnl;wtf^ICyf&rW&DMSd7$MS+h#L*lq%N(Vpwp7EtC>W3IK7+1 zDzVN3$cP}HgXIONp}+OH9R8&?GkIYawK4522wUWQhgXlo*MrHD2??)XCpZfJ;4J)X zcW->vX>L3+E>>Y{?R#TXRJ71iS+GBvR8v{GN-%}Pjg8@Lp^`s>?Z&63;w974T1lu1 zANPCTezc-iL6&ON!RhWriZn$cf+S1ou?H~y{r!z6(xx8T3~}YXNyfTb2Gr`K_As8n zlgE43h3@?|k5o(dqW>ISZFW|#U#)>svusMm{3q8oW_5K7uy8)xu)v}3-(y6gqr-F0 ztDam;i{#)?*fv8w%RHvf~eyR{)Z92^xVna?eC=he}iT)yGFUf_>J+kQ3mzSTe<4ds<`BPw?Mno&S~yGrPtz*g_^RwEl%8Q z-D0207IA8R4O=unhyb}C@Tg79Z9a8@-M%ICsH&WEK~$|A*`D32+2Q8WUJcu0Va_}v zAC8@Bw)?UmGN;JTmCA1kfA8WIwNP2fQ5I&J4MU4i3{`9@r}?8ytG z=eo+84!^v-XHX^A6m)a8DodJ*nCK9>$$s1UhL06M9nDY@7(BpUD@#A0;mu*}Z4K~I zIzF0afq?hGVr`s9%8)@fthdimOlAv%_Frl1-#z z)Bx0(&8#kAV|!_ETGYsjJZ5m8r;OTL(AO8RlG|4DevkgP&?g3I&#b9KAQoG9HWU^M z3WWcxbz}=vR&n}_UpbyuhLDZ5Z0q)p|Iwy*@OK#I{DsBOdnXoLs#W}o$>i{*)75u9 z97bzvii)@s!|qrLwKG~N*s=tfaiT-|?|&UDZr{I89>5iwdM;0W`qY``gLUGmbIh|% z<~`v|uLZ@q2x7OEmrdm2k!&rlU+Vj6U1>a8 z8$ngv6?P#fOuzQM!qRs-teN$qtm{t_YF1WIt09hq>p9_x#yj5lPPQjsao_SAcVdkV zT3-#hV+qDy%lw#{8Z1srvtET1s~+xQ5!3av1_Au_%m z4}MN{c6LhIs;U8D-xTUa%Fi3EsqXJLghrLJ{k6KD#9?$B)*hPv1(1!khyj0vrJZ zV1FQrLO?4D`8Usid+e~Bgm2b)zU#6SK~VUTOWQg2MA>R=|Hp6#KYVAlq84C7QZAgsIAtL%BMmxUXPc| zH+fT*$wiHg!NHA<7$<%+&d5ZUZ(3*>S!#DOzMcJ}EYrqu+sMsHZRg&}BM@+W@@5%) ze0|BbEkO}Mfe<&?h|J#kl)Yg50%J?Cy94H!FVD;*22@Aqg?O7OPw<;aU-6l^6|g-T zDRM~1kG7JX-H(WTZ5gG+Bqo%l0c~O7tHk7Z9{|?-CWs{^;{z?XkN>HLlR!HWNzC5N zK&*v!uBgNVIsM=r^y+U!Gat!ZAV+G8rJvxR$Th1rwnP+($*9Af&z>LfLY`3YTc0q5 
z`>ao_T^aMxHY}2)z0e%_S&khY+L(6%-Lr^lkLk}(Egs^=OrgkS;%O*ssT2p$$(Lr9 zaH(JN^1gEFpQ4lbw34^#(P7DHXO@_yqa(UfZ2@1Y}K5N#}KgZ07$@QjUsJS@ddKl?-o5*Er{Y<{OG>b}1AK0AVY^ z+2)y{rLg6Fe|kJ}c3UEVt@K}{NTg-~-RW0Y+8ZCsVA@YxdRKaHQeJ==~%#ws?t3v|TNXGwBMOd)Ymh zm3s@+#DiWT1$}OP$gG=kWp@ZY9sow)I*wEyYqv`CUI{(NCogmU!SWPS;^X2H1TPJm zB)R%cPP&Y|PWIAZNU9r%zfwnx3xeZCg0LDl_D_Kd{iv3O%>jM+C)UOw!+!|8@<8cT z@Z;5W5}hyr7#qM-_^kazZ)K8WnK^G`B#J@sKxbCWcWw|%r-G3)R4lQkx(vDMJTAs3 zKST&v&SIv%*hcFfK@cJk)&Il&{)s39s{G3!X-sVc(ea<097KkHNXH<a;4U@AebqdXumwx0UTTA90r54CZk#i&@Cjy&hpJA_UiO}rgyTRwFqr?dkryV&WFueB7gpg8`BO^ zO2?(P7qWR&iUns`H#J*9@y0}!(J1H1Xyomv> zE{lt6MWA^BdY>tNtl!g*b>f4rAQ%19;Fg0Z2=D1VRN@3mZ|D{hQ<^*W321<#qMu-} znKenC@sKa}FnR7PjAdenDwmVYo6LWOf)h6&)E+jrpJMrl
    f8vg>m(o<=gR)bVV zAeZeu3n*`mV?H;BbLAvJoEt;L5tReEMT{H$M+^P`#9~&;2iQvo&}(1# znw$YvONW11S?z|sLoQTj!&-F13XOyYB&P=q=0Ue$OELKuO-nbX+t=}yiE`pQn}??L zmlK2;5>06qz)(<0Sn2|^AQ2LsfpCb#N~Sytep4Oaz6c1k%S&I-VaTk&zEIL8RDKpn zuwpbvv5tQd+7KcBhjN%KTvKQX|9#eV11o&ndqJM|$Ik9<>v9Xasw$(7c6wIU`(uHN z_gcK|v**pukgj0saDezVCC$cma?;ke*;z$o82zIKR8Y4mw*ZhX4-Y>~0jc9%ZEX<% zWn>&4B71ch;`}xfz3fts_SVnm^kpruTPB)aYJJuhnYcnRqcYI>DO3hP85+hKW;6NBKX@* zlE)Jh8NH{bro$s61j0zzWlIA6C^o#_?q*5OIt98Ln?5ttd^1QTK@EaRI(-5JbUkP0 z2D>v6zSGOoyN*Z&Jl(~vsbPk>EYX=`fVC#)X{KLZy=?K%=R?x_x$6`y4PPL=6V_E4VIS7hC+1D zFA&rlxJn5GsKeHAK~l1Gs6_+GH7m#d;LrX^(|yo=So+GYklEYAZ`#(@*52Nh^GS^z zP;S`NlvSu2sL8abOdPGgGfKP13L7Z8x};hMSNkL@R>`dabj6O{kUCQ`4Fi1z+M+|8 zQKd$yRdWW>sVgf0>9fC?At1q#$W#I)Dj-gO59%As2Pg3}qxfuk%#?ix$CodleO%7W zkD4`_rNFn6z(nWLXa4bNE2nt?mC^@pk;|g;X*ZvRSyObuG65Z({glA>s zhf+TfT?#Q4R0|0fICT*+1X995(3630J~n({|A{L_ac5802n$~lhgzcVFs;D9&!tp- z0(qT39{c-)#K+=oE%eVnmOWVVK81(1rf1KSeQdld<_zv+Wc5^3`A)XY*t6`iKhH6p zD}`Hfa``M&Mnx43YhK~+xqRu8JQ#b;y5ZSw*4AAHT|*22zfc!NX7e%wi_H^xbtY>+ zYlq0?Gttoq6y^qDHsl|<+EQ$*l_-NMTM}z8h5+3vnHYcE+y_oLI$EuYljtE7R#o%` zFwCr>VQp<{s+>2J}O;lShF*+CAA|1W_WtJ&$20KCZ;G2q+ z1tEOMQp+*L;EwP)S>)rFiD^3)tfG`tVK>r(d9uY`*gW3;EJ=U%+V3Aeo^-927PN;Q z6u2$o?K*sU+n!vc?8!p6$K1i;o>!gEb(+ym-x0VnX!eWzmyy}lpP+q~@{BlEnfGe4 zB1FbNN3$lgZBn4qK2afI$sjRjRj+0xZ}K-b`>1g781{$nt4w3|b(sS>@T$K87)$Nr&_Drx~{j0SrWoc8s2Pps8dhifBqS3M6=)m=*@KC4c)!$d(U0#mAt zvKRzuHJcwbt1}T%rKN_`*76@*ux0P`5xxoDnqoIrrO;rQ)|LU z6{G6r<`g%-%sD=pNk!zLs5~EBo+Dw5{oNtP+&mnK-yX}cpUaZ1IDG0E=Bsv{HNT85 zk4G~;QrsL=3#-Q!Mb@;%cUs0Q)<0^|PtW~;~88qfe zt;Kl`^$GROH%6oBs{rb}HlU1RDy49AK9bXw(j81JPQL5-O;S>kzTToyI&9@Xdz#Yi zMTk%IpGW7B^!%=VI0G}+i#+x`^e}GRR089(D zL^g-@&;9-xZ)>7ffxc7A`|RF1K!w1SbtD)k4~`*03m$9!U)fiK1F!Ll1xw8hE)^%w z4<05zb(|ASJ&mP~_U2-#<{Hw*8NP@oCaJAtwCph7sbE8d{DZH(hRten)kkB}iylTA zr4s2|O)*5^c>CTZ%k&q}DdqhI^|QPDdW6be4BQgfe6??(PKqO0#W7v`{)GU%06C#K z4vd`iyW+wfQB$j%p9_#3M2IgC;;7~fYwfTm9cgUF>q(8oE<%D8*zt7LF zbSnLX+}SPTA2k+EaXzJE$)kOHT2VyIWs}0S?Zt&39Ulnw8LiJt#-8EsoA%R0z75OE 
zifb?NPCQcn-bne4MSD1#F-CoI=TZ1Uw&OS zmJuY$V$4|uJ2iUOy}FCpIEl1f?p7i$`ok}Sl)vL5Q%}Ho$!Q-P6LOadYSg0Ik`YFi z(F)WX$`Vz13Ka_Vk$gZadXJJWOyTR%i_w$*9D`0Luie%EEwiXexhVCFzW#$2gphT4 zyIrG+e!mydA1;^gcjUhRyYzcg`c0wsQ9d-k__Oxq7L^vhQ<(b|<5y;LM~%!xBx187 z+RMF%6?_&+^jhjCSJ_(Lp+W`(Ib(<#@X`YFRq9TU36dpY28YLH+S{lNhbC^2sj1_d%?@$B1%#kX)ExauQ| zug0&{Y|g4FpR_;OE8l2Y=e%ekPo>aWiF3pK^pDr-3>N#0l%w(y^m&&81el1#jxYUKczpr(})Oha}zU+$2sn2;!D_~*`H%L z9IkZ6pDe>3Wk&Qp@eH0GX0#0_Q9U)En}_2jnby{aPR0dS<>%$5HU$+P>oGrfzAP1U z1K>FEq5Yj6pR1!@?0AgbN2Pp#KHU}ak{krxLATB_TvM9q}O*drnIi=sAuIOf2Ovr_>9|1 z(SCtvy~)CXzvqv=L7yt?-Rd#jHy8w`Ur$b47Cu!LJ(hQdYJO+_0$!xPeT>uMET5{V zB-!`FH~YA=TCQI&pCFyFAB+HsPW0a>I=kRqm*BO2TbmQjx69M{_~&%Y(u)>SY`N0X z3NROB-mLcT@)Q&E-9I5=&MMCdBLMtlmGC;DW!S<}MC|aF@H4{uZ{cT`i)}^n z*()%ck@Pjp(8Ir3v3G-|evBg3<3JH(SNhoGMN{KEKW+xY_z{X;?H);xyCREX43_|c ztI*PVCLvF&hZ9r~mSbxIXrKnbhJY&AD!mcYr1w7bu9>1~%(WW)y;=Q5<)Dumo?oi^ zRWSj}Qrgcie11hnDX%y09BtB&!1z-m14{f+{^~v1m%9uzg#L}xiG`%j`4>%3^mMYy z^)m3ofCIKZ7U>9PxlVP|)h%6O(y2KnP*FW>L;p+p6aAMO=Q|jUEVI8I#s(vh`wn=c}DQlsOB;OpsZbbma zxkM;ziT(TK-8ssfcd||JvyAn%-_JcsZ`vuqpU>Xgu$XihUE}TQ3Z^-X?S6XwL;74* z2u1m#aG%y@Sdb3gQhph+PUA02un`Jgh`hI;FEd)XRIdoe#{GzY+-3PHc*nD`<00o9 z{-dxxe|KSI9R!JR&O4fCW*+a%%>@I~J5)8IxT4*VRB?8W;1n&}5g`k{@o~lV#x-v} zW$tH2!yl)mh6a)ia0dN7GF(64>D`Nqid*ALj{+YbWf#GoiOYUxly~9ML6S#)Yz948D0S zEh%=l%9kdxB^e8~f6dI|;SJ_T~s4P4du#Z0X^txCTRVPi}G#%E)j zc1WLBot>Pr3#}}l7nHPYxvwiBXse|9Wjx&4Nq&U(O0Pu7Tg_v+@v-CdJ}4MZMr)E=hU5Pcz@M5#pv&g z#X_?s0)Of5Yr5ImryhE_A^7m~1Id?NX&WwR6m4Uod9oc*YnEX_#DxMiK(_)C3Jec^_SsNP@yb=}(O1>f^rANC0I&!UIc3)Q2$7IA6d{~T8+nLKZ z8;SIjEwN;nz;_3jj`p?;zmb_O8hUGzy871H#TfTXD0P)jt^%!?>Rlf}Ta2rZ=p9Wm zYG?nYQF9~S_WpFYR$S}`mM~@DsBWE93FZf46?ragJ}WkCEM`g<>QdkJ!MwA-S;wAz z6}*{|@iz}LRo17#;E$T|!Vc$pj)qRdn&sU6HUYTI0YmG!GlJc7lR9>>Om`|mKF zoVzFIhgYT1#V!c4yH1vffz6)gU3#9f>n`iwy81RY0b5J)@fh!pyZXi4HWTFu7f{A; zr}RG)YS2s4sMT1XA1NT21?#^nrk(T`)dJxxs%2~3NcZ!s+IY3Q=>truj3MF&hy!Xs zP}L{h(2W-p%884~wipofjXBtvf1$8p>v_A3%2Vv?&lQ)weSdx>Zq5Jk7+1hCTc+V% 
z;>_CNEL)vB>)-A!mQ?0P>hjbEU%pYS-`Aw^0c1uPbKUyMO`b-?Bjpp}5w(j=Q+D(? z2hKVvU7JWFqSO^28jNw#OV5I&q`0>yB+^z2Y;7$Qk7yk`5!HI3nPr0A7Sq0r4!HMi zbKU66sOICpN!YO$@)Pd{CSG>^4j2;GeqZ>6w&1TvPrb!}mxc+D>CSW>T%?h_ut#Q5 zi3Fh?t@-8OvTt*Geho?PZk8I{|79fCN@c!kECrO9zuogIQq{8_AF%eNuyBm-`SUB# zC-%}(E9=jKB$3r-#(%5E8fH9ehf$q#NM5gyl4kzm5p+|H@@bSHD1j$Y!p7c zEb6T9U!*;{sGC*46~OT2*PHci(An*k?tB5>3ufibz1e?p>s<&Hhb{2oDgh-aNverE zwiVQ*HQ6R6`r)a{3{LNT>jbWP+1+;p6B3Nm{_9FL-(MYWvv9yYzn5$rC;E+eaiNv1_;%4c8BqEpXkuEDkPUj#kT4%(Ha}?YW{A7Y44OJ5OZy5I44*ewySOJe4dUfZTo|kVNF1m?$ z(z}={%zj{qrRZ{IEDFmad8reyNZ#hXaOE{Y;U~T2bL8oM_N^e#qs;@AX1M68e61&_ zB*QZcUk+5qvq@7jj1&u14@j=iDK8OD;=P14;MEW-c7? zV|{7w-pTe=_o2bXXO)JHO$J3R!_wo2Ke#w4*NK_vn7J2X96w1ETtLj-?(N$@H{zCh zhW%9DjCs*K-}0Zv>>UC=14?s#IRoQlcYU$zfiexZ@-$)WC3>?(Rcleag->p+9F6*U zmzrT5HUSJohLtFvA>R5-x%k`LUE+1^&H@(-la~060jdotZ($~|b1UeJ>YAnJ${&w= zdsoB^4YmiT>X;zk&S~XU1g&8$)R1`6kXc zg4nFFHGHm5%W<1e%<4h4aqA&?)D48B5oj_oR<+eJ~;FQG6$DtPU5Sr;v`JH z#NcAp9<=qj)P1<_MZBBJfJ1XE35^z8fD}Xp#*aJ(<_ClpYFe208qSK7qMJfW8RGAC?+ktYI z_JvIAK2^UycCUr@<3Un&ZWI3EI)?K|1$gr`1CobL~=GA=kxvDPreV%^wfs_LQICzu1TRdP#wJ zSO|=qb>)A`>7bwNvOLU+WRQ{_nWoYgXoN}%s8ENDFTZV_s?V*W021EEtld(1;SuFL6UsOpu@hwX;jGF5)tC$-F=KvAaChnz0z-fkNw zzGc{R3XFWd?ORNFP~x&owniSqeWBLQHeEF8x-?)latBxNIH3aa)RD(BA+w4Q+bEFX`F}%ckTTuF%R22zDH-@@izDV zCDZQYxkxj{3$czsMP`Wy^Xq>@qBO7xL{Pprj@P%N+PX8_QH*g$Y@g1v3Qd~KFl*%c zV=xXymbJJ*MF0G{Mavl8%Q){vYWQuaDTi&x^*RniDA?PoaN-mWSbRU9%X&3!=SV9W zPm%`^u1WHimLMYuR65Y<7p3~vTUZhG=?RF6Q>-3|vlBz5T8dy~rq4Q<$vH7W_z(Cm zzY8%-N3x9c?(uN>095X%Hmt$X2&kCDjx|m4?8M>(7!ZhXu41OL=2z#LGMBMvP*`FM zu^scvKV`cR9sYk+Gya0fwt>hhFA|s&NT?YcM9_VaWz%JV7QLaCTNwahg-uBo$`9WO zMQ>!%X;}Jna$BOQ(PgiE=rnNV`5XV@5N6i>z@G$`4Ptz(td@Sn!F@p+E_`Nl7K|`l z>5T&;3>iLd7c@Ky{Erfjl@eVWT<<6OG3F?e{4%E*C4Vo(nGqFm7C8QkDR5SnP-5hy z`CIA8Fh>FgwYs*pFq(+=-&h7t%XD`fd?OU=J)I85J1Svm&DhOk`Vx!|V!~U{fAGx~ z>}Ipm@YW)XF?IU>V3ntb$7~;yHdVDNbC3?dG#VB_oxx=i!3PZMU9~uIj5P#xNBb5n z8VuYdRup3V##PCZ$#Nj207i~8g~wyt@oM6FH{J`e_O)8k``hD;;Vu`!_mDJzp6tIt 
z`a$>kpX#aqpQ%R)Vm2Z{PyRnOcJLqZ3&{Tudoy_OpOQFeC;mUc3{p}|JM!+&%~2w# zS0vI#v0JlgYV?f)mO!@SX4+0E4)hhQGjR(+cj(T8Za>uciuXM8|=T z_r8pcl{$omg67;0BelGWE|3>1Kw6hYhK0`$fVSI#fwn|hq$9D`vc9ga*8lL~w^HF@ zjzc}|v1$&CbT0@7dVeR>fP}$lwU>+M3c&jKKGpenXXJ^|D$nNFJkw^Z+lPnVG_}YR ziUG@K(emI@?zZIxP+{_m3>SG6g`t;kOQ#WQeaX|S;G-Fql<&8$(hUK%aKbY+TicRk zj=sYOZ9^&urJ|U|0Z-(2l@kIz-pf<(uB%0AY9(KmkRL_0NLW1WY#Z}}Ay;N5nhgsU z2=WTGC@+G$#lDu-L`6mE@`{R*l8V>IY+DX@;)8>0P(p&fh;2(t$6#&k#9IH!w)x@S z9jcZ$EvLtpmo5W~$@& zxS$=Hw-e>AT@#Y{-G>fMeSuvANID;Slj;Vbr;K>d3i^xE}2G2^bNPZZElu!!TUBSzV-S+O26(m?8Pi-o0dquy(ADWdAO%ugxLCk0)|hRm@-kcn2E?5!Dv*&u^CX`}qJi zi_H)4zGxZ!eN!d=g@ol(;mHw2>fFW(m2UgZu2OWnc!!P9xon?$2_GQj09X5w-L+B0D`@1*Uz^aHm?;6%UEYnT?s zq;7Q4M5ni>YjAg3sO8f{etRx4W7N|XwbR%2tXlR`@^1I~Z*5AesP~u80bCBg2cX~l z!M%I${FXnOk}J`RjT6nuelHU-vLXcev6#ZPr7{l2i%&6T8U#J56$Tc{O}^PZuZ8#> ze9+;6mZc=4fYiw#N!696H3R?`EjvRtwi3!)FjwYA?Y1@$G3ly2F-G2{(8q-3V9Q}4 zXyA%|sl`lUXq4c?E?D#WYBgLf<^@8d$!w%-L;2$c3*=~@uV|XazRx&ZdO8l5LvR(l zRpCNh`kdHX>k@BZ%@=&oNrqhmTHR}=#dK68Ka7)HT_yCZ!h^fS+HE@*w`3I2(_PZN zFF&-$i1g>1S04JKX~P0(J;P3@F0b+>!AnW9^8TA^OBw0;_A+x9iekpowdgfKY* z2`m!FzkVX~tDjH%l?Qi8gw2k7`XQM%0Fy%;Ufow9^GJfrqmJus_#guZ!;Ok8E#M;m zF;-#!VJU0Mc=dvf;^2is2$kaGB_<8^GBk`nwtUCwpW8n?imAl-E!oif+Mey3&*wD~ zEIHmQGV5^N@gaMfsG_kH zELJy9-sQ}Jt}RTo5(>-jixE**cj#=~b&2ARf;vh5B>QhodP*ae?o>*mEOjs^&s0sV z+_{zN;CirlzpPh8crSK`l*&8HZm>;%I8l>lh)8g6Q zXztuUH;VeIM0N+5$H@7n7y?lQKt{xcRlPoZm`b^=aj^ypj|8p^Gs{odI32;t12(6$ zoe@&tdfA^H9j%&t_wLq8$0ZFSC<^knh3sv} zL}b`b=V=*Mrijy+Hh0f;_L$U*N;7N4D{7bQ2qBFOo<0qeRmd&rB-mDMsxBJGktdXc z7n!HR^xr%Xc{$8o*!mfMMzp&tRYF80VLRoF=v77BEr!I@P%p2&oweCpJ`2ImY{)*3 zzwJkoESZ1W@~;}7{q9uDG*9w`P_0U^Vvq!xy}rzrovP|R0~dEJ6Y~TLwlu_T zIB=dAZSr>~^Mle^DY-bQc(p2=Kek-GQbE6wMn!Y&--TZc?G-5%d6MM%`%8mQ&v_PT&C8Y1IL_24#a#ZWD6(_lN?eqC5GP0+CyHA~ zFRX@D?c~Sz;_pTy$jdEuLRzfG&rfb9$)V?O@ELpOh@Kr+@v0b3z4-W{TT!wRq>(*R z1<}6o&DQcC>##8JB2%3cv2``XBF6N4W3dIy)EofvN|rJ5Q?&RHE4|)1izC%Qcc)%Fc;m?umN# zq)OmsOI}_KYfkp9S^c}0Nj*ufGXa5A?EHQ!Xf3sK19^99Weg06(>CjGy06usj>ULl 
zQmEoBrr{x_h~)fWbfJKjvUScyT3gciPm<|#GTdMmv%-xU4t}tD{`+Rm&hbK1;KkZ(9-#X&PhW~Z&lT@?m-u9S^G6m? zQ~38sJvi@O#-hqM=GyPPS1Kf7P#CvwR6G}#QFU=K08qs{dCG|)e6}48uee=a3X5?^ zsBpImY%&tN$X|GOh?Hi54Y>;LtsrROzE}h;DB!-(o?vp1RHWM9l+umL+-$z3#cud2 ztpxmEHIH><)^|>we!^$+Ef)X#-K8W)<;D=< zP!fmr-i?e1O-nxB_q3*UXGB-Yi_VM;pL#KsXp|!PpyK@M{p#h0-^{9Sp>YAg2Xk_kibOvaHtOU3B)x%dQBx55j)Ru+5Ua$DtMFp}6C#Kc zLc#j4l*gk-&7&EC6DwmNQ_;W}W%2M;YCoG3MqIqsZHesFW7Nb{#Y-#hPj48LT%fPk zM4;c77uRR=LC%Bx5)rQnxo!P1GvZ%sOZp4M?}(jB{`~(&i9kR6?-rz&$xus8N(C$@ zptR^YGm|h!G6znz@}x2S@~qkiW;wI9Qr&)8kAX?j>tLrZGkfeL#FFR|-o713Lte%G z99MtU)YLFaw6?l0fBZO7?c*U%GGBe6|BRNXy3d#6EAVWEVzehZEGC)ZBi^%?eZfBmonz_IX zw4XMY;67I$Qm9rr7q>k1*#U&$pi9%y`g4=>eGh{onH*}t8pR$eK804C`fh7xh8+(H z$>kk)1-LOfcm5%drJ?rrLPK;LW1Vc;h+S(d)W^-9%NI)_MU0)}Gyg{eGm(HIZl@@a z-5S$iQ{0^*X}5Eu2L;>4PSlAIm#G8qIn?J=+TRXJASfTjtlWljSr;F$AY*)26 zO;nrejTwY%b98=369h8AiXGV`uaf2g#hpu{?(fvlH_TO(8DVHcr_}qqLxyYBHJ>4m zg22UOj%R19rKKdbLuDgBrWPBQ&^4qaOL#asl5z+XA2{`jjO$)z9TeUG4jOe^kw(=S ze43P`pvxYPyppvHhBgI7F-myv9658Ki$Ihi@B-OnG~F@tbe?m*e`WhZ+YQP+`vt>5-7 zDmYm6;N*0seNNG=52)>_Q?J-z44Y`{VWi&{n!Ha(D;t1j7EGBB)7`YMjp$|4biXN4 z#B}odRa&I6p^-h9cN7v}QTCVeTq=xVo$%t^f21Pqz*j`8xgjuQO7XS%vUUH;KafYp zDl%6;DbR)-gFG_W8WDOE6>0p5gTIyO-SN^WpnkB?Jur}|7{x5@H2}t*4oWna6=9Df zG%NU-LL_de${L?;N`gR^Tz-EUN1eWSkEk>Is^N@;Fy@?1RIso@C-|C%`3VN%V>c;- z)OWf9rFHlMeQA^;NYLLY3{0Dxa>T12V9f22`uH@9{Gu1riWX)XTfyE!f zw#AjjATp%=k8PhPt$FjcK|7+WaYL073V?|q)toQfN}-KzBij={-2B?KVpNhN3#5ri z`A9c&_L>m8>`jj0S1r z7BK6Lli*nI19G&<0SqQM{xTKEvKipkVPV4vNxtaFW3CaBGG6!EI`RS&d1ue+79Ra1wxS?JHp4(rHB(+ zlCn zGBd|2k3s<|KQNIMnn7PzXu=ZMDicSZhUTy+`Gu5v)AH$`DhVn{IL1!j>>H=H)>XQN zdNJ2MDyFukY3FUb_PjN*yDu~7BBSK!?sKc-57$!2{$b_9jMbs|Hm;i`)GZowM zXtm;phSM7qj|nD)BiALEdwi4fe6)`9&Hv|#%Jf6>O3k5$nYKV~h&d>IA%sx6SwZzCDI}A=VeE z4|PutN&8%;-f;>#9esv-=E8>jzInAfakIOyfTu9o<4I?HvTC|)8Cm_vWI4{6A#OWG zVEC77?YiNJ97aH8hpy`R@bUCWTkQq zgFSdYUB~PF7sFit_d&nI!hR_cBQwgbraL`=*!V1|ws@=fi8s5ynK zKCi817#0*ERrcpn8(rf0$edG$uFEi&YjS6=IVN zM|D!t1_uz}Jj_63cljWYQwd~>P+-l=_LM!yGW63~?Ul$09-IIp<-1b!< 
zT=RMEH}r51_1OA6V0OOO%Bh<0J=j^Pg84B%x#}~#kA+KYS-v$<Rj0qwR5D-Or)6Yy;K)yO^?V7!S2S*9g4sAuXP()<1$wT@}) zdaKHx>M8R8sIWXT*|fdd)%mL~VYFAFoNvm^`AHIE{VS+Y^x9g*v5Ef(dS5(eJ^Ur` z0xeHLVM%rZOy7dFZ8_H0hPM*Fd?%=Tu9;76mu_mbxCLG{-uU+?yJX->-0PPL`Rr7}%-3yDtt`lPyxU6#(Ner-0{slE?A zx|u3i#MpNz);Unq)-oFDrnBwUi;&6bb}8weE6};-jjv}wi)zmWn=Lc)GbMj3uTN!A z3f%I!v`CO>dF4Ad_eAFg6Qoq-k%O$Qf4ghiZ_Ab!53yQr-O^fz*LZFQT9)PXwou)1 zy*k#T7BaT)6Tr__>~3%<9XfolT(&eUy8VO;RKTCm|j$*th^-rXd2rh$2CJN%c| zDA*;2+%4a)_3TmoiA=ZD?SCK}rNlrbMb9O^A;XH}?^3b!V=Evh;MZ9C#6h3?-Pydc zVNUlg*^(SRiC0hWzo$h#4lvuNdVw>RNg5q!8tGtgj*j$IThwEC^CJeub=}a{Ic|P^ zbQtMAhOl5wD>CO@wMPE*yl=3*<~L4hzc&~7XTe=MR9(#Q69*HLV^8DmYOjM$Hb;qK zTcYorrV(RdDE{+?d#uKa!3!HZ*tvZ#R$4mh1DvwbXBQHwP!f z)TQ63dPctiYT%5`Nl5IJ(cCoqaEu+tK(#PUhXhtd)pA0_P^@&pO)G0_GZbC-cRG5c z!;G2h;!QWi&KoZO5X6yV1%8_7+6Pq#ZwdOJw2s9x8e3jpN|Z#JQ$kWUIK!9K{_& zkFt%maadgUyeAjT@69m(pcMuyOE~f))?7*@l^Au7KTHf(aUUJp8&WzUwJsZhUdRCn zib^ZPITOS=(M6pbb`BYnPExudP8(5DfSV_^mm4XVwA?3qFupuc)%4=_wZILS24N!1 zuu50g5n~ISmD+gI4rP^e?S{a9^xvr~)kSj7<(Rr$6M^BC6R}R%w8MGPJ={ z5R0@EVnj4b9C-EtYnxJtW)8efK57tjIR+L z9k}M2o1dX>JyGp2Ha}nKaA*)(-CN|uU^S>a*RGYFrnTJXSw*|G8X+?7y%8a@o}FGe zJ;J4)c-;yW0LO7$Hj{4ep6z3A=vr#{bW38;P0P-FB=UBLUH^7EzER)#w@_tM_ShL2 z7$CJ2tF>$sTHc#sJc=1bi=8H3ANTZI@%wWp+c>zta*yX*T;4Nh?D(RsWKQG^Pspaz zt$}4eFL&_{-v*}~0guuxt;9X|lJWe)T53RJ&Cn_Hc)DGLL7DA}Iys*yHM9);u~=tR zCDruTScXth7ql?MNAxe~PHC#&F`T%wS8_&Z$oPWm$ll-?7256i!~5CoTO56GTtnqF z?B=`tL1H3k6^1^D(?rTP^h)+X5(6#)}Libs~+5KIDBQ zVr+N-A)~vj=n0eai%d=OX)7?5_`sE^I`MXoFn%rOL^_d<oxEG?v_hZ;Ma4Lae2ug+x_*ej#CK++WBZ#YQU_i zms6NVlUwj|$|zoxV&WvOpAW}}xgJPdx>v4N{j%Ua+XjM;q*VBjkBVr@p>1Ml@!`Uc z>0ANBlR5bnKHIB{ii zCcfQX`uVAyz{xs)JY67@l~154n5y#oaw+^UXa4-c*u0u2ai*(!9deOHAoY@8r3dma zM&ZieoPUkL&rf!#RbAL*cPYPI^$sT`WZ~R0FRErE9H99N*5;MV@gYOclrHw{y@VQe zglZu0>r@X9m?-uL8V3>ZSsQN1Td)oH=ZO_N8$KECt82n36|$&)C%FKxqVQ|^ne!Uv z@2An5(`smC$3ytv+*dV~4)lze%0g*w_N}Aj{+i3;MQUGMpOf!KpJEGK&8U-y`Hf23 zUtB0LJbWLiF#Pmn#UjF$VycoKjNE|3E8qlNBahtWF;LH>Es{|xrLeZP_{HS6Z?e+n 
z?R$d)n&0fxYa(a5)S~oPoXyt^^o;_#)%>CGWsz8^nA$k8r)OHeHkN^gw3Y$Ng~6 z8ZLjI=al61_DA;@FF2hps3Ym%PXjb0f_|DM6aK_>BuMXmj$DyWt$yLzv|%jc6POcZ z0G~v61nvfhJ)l+meB>6CaJOj6F$m%avemvqu?UBa1Ga}%Q&V2rFY+?a*(dTn7`PCT z?%*?FK*D$>idpo_MRZDRZCq@btnn-AoLLd)39&R03@p^5lTlaQ{hEZQsBHno z?MZ1G%}=bp%(%05frWbEueuf&=itbq*5aoJ4W;XK6NLr$Dp;k+5%@#D2{ZE36#^Uc zmt`CHAr0vJt8d%6o$v4Y%wol)Z>a$4$N0+*GHdxJ~VsbEFyRVMqCr ze5|&FGt_>rX~RLjjJU}^1m5p1{SAq@g?CRzMmv7(U4eP+(uG3bO;Q`WC64}O*S24o z6KD7B=3`LG&Wpr8IP~NF$xwLm*$WV5(0IH{^?OR<5?gPcc(jkzc25Pitsj z;+Q7xsM{~3f7UR6d3k}R|C%nsy}H<=Y0FrK5y#gO=s;W?o`rC;f*J$7L5eM_*|HIP zgK90bcGG1Dj^87@w*CD;|70?h0J^wQ^QyRZEKTpQgwt=<# z=LHleGxy@wGJEBL0D<$J;f;3e{!Avf7fqnnZo{p2&Yx<-=0Co5X7_j>|zAoI?6_%dO3CP3|agFPT4jJ zhCrAw{|2?Vi5qVK7e34k`V_;$^ON>##mA(TWV{zKl$z-E+N~Gq9{T;|K->?Q8jiz~ zV~DyV@OoxczLR9z5DnE<3-xPEgX{_Z%*K^vw=(CAk8jEdT8M`^>m}bDGf!1HVrpEL zbo)E>P~BzS_lb{@2x8kcGo8^Edy7P8CxKF#pTblXl0@8#uv=!s&o~p%|25;zJSD5= zz3MrpWYivdHW*g(-gBn?=aWONj};FSkG=ahf1dUBG1xd$j31#RC~hcd_Z8Fr>1*o! zK}pLV#$6_Qn|5L!2yfL33RN6g3&UKKl~~lrBwCZ;w&|V11TTB^*;STYG!HY`l(jQYCkKdL|mBr5mr#FTqLfCVz}g!}yZcoYi%+VoGr@DI8g3tM zI^Py!BRv&u*wXpdSLBFO)@Q$$PrTu|XY?>rv2{Bytx_`Do_kgSNtE;u$__T^Ukbdq zP){@Ih#S*WQ$K%BnXL(5OZZV%7$Ff?))Vr{^!O6RB}ocTqF=w`8-<;>S8bR&A6nwJ zGU~QARb-4B{)bDwF=_65G>Eq|us1Z*e{C+aZfosMTiM#2jw{o7w!d40)H#bOf8L)R zJUHZBM2);A>0&?0?1M|5@WyGLi`fvcZ$96?Bs_Wr$3by?IyChsRxkCFbwYV$k1_#n z?K6=>7AAVk?w5bn%`DJb_MB|uLg&m~2zAKvXbxUO0^JIkzye)%i~re?p1b1 zZb*{f**Y1JkAcgP`_N@^Af7u%ZRYfog!rqp!+MlkO_z9~2Y;J(Q1DN!-In1!<$~dc zh5A7yhj2;QY$v)*m0O593|i93BpEF7v87MhhkF>&>)1*oA@;!@zCQ8RV2`l=6De}` z%QLdl(C7^wrPc+XahIt8Zy%5KJf;(QA*h#RlM{B9BzDJG!80;zM=pCaoLV0V=1An2$#om6d8~6(( z4^Si2i#yht2q`#yky?7LJjJ@kyMw$Wv{B&SX@v6+g}H5%b8;<`dRmoct{aTr2g z<)ji1-}^OVB4~J0xH1T@JSI$+7GK$RMplBDgUu}0>$%~<$;)%n8M~4 z-)Uyfm4p%i(VItxS=i1*P zLf1~w4+;m6g8kf#+D${80L$`fF@~-)*FJ6?Bz278@w=yHQj*9lPxd~gDn;X4I&?dW} zj%Afj=BO9q;kgZC3U%a_2PsDhjjQd~s2wod!)?)j(sQR_#TT1;2}=(lxkR#W)_(wB z3}#P!aOV_EHE{PT73c|h5LBsqKKEgwkFXPmX**pSl%MB5HLv{gC)o&%xXA_6`FAWl 
zr{s+?!t!qe^`z;ads7#IGu4iRphwKcT}s#51SEBIGIS_Y#k_Y$NTT++GZmf6P|e^_TC-da?y{` z|0N@`FjxQAAFcZODyNB>>CP~(U$t&?-GzogA;G+DYXzesL~nlH(v}wkFn*)15X2XZC^Q>91L|H;}9A_S?>4YqP_UsFq4Ds*v z1C^|JC+ZDm6NyMRH|Yo7KbxC^ zs!r|`5DlW1wlSMl(fbU^K+5;)yUL{QY+Zp$Q{l zr@v~A;LW=dt`+b~%o0ubvn@xF^f~0Qv22@J5(G1$~fpb8ECk`yw8=4jvr%Zp_i9>S&wX`;Kma z&1JGo_%5QV%B|PCXZH2U$teHu8XxtxgYP< zw-r^D6?$BaI?KPrgezUFG2H5=8gzTeLP-iYn{BXTq)M1K3tgZx7@~th`c~8n z_&76C1RU*QH~`7gL|15x*~&O z(MvFcODjt{l>oTxGk&6uqU@z@*4_bzc8T-ceeFkbF) zNXZJs??&+fPV?GU)vOW@23y;7_jrqB2MWlp(31E4)MJ^|_D0#Q7Fur|Q=8P^R##ng zLF#YI4E|NIKj9fz6ZEOxo!Plhm(qc2e2a{&Mr^&3X?&`X_{yv@)dU_Y9+&G{&Ye1~ zB+2deJurNu*;y8A_?sB-TH6{UDoq?{-qlC%d)b&EpUl#DMIo$)NA?!8`eFrOs#{xu z*<)jHx2Z=0+=shN_9ldhy84Qn17~CyiP5a5DJijeC7M>Z{Kf141&3`5^g&QzLMGld zl7Xkp?0#H)vu&L*2K|$oo1<01A6s*9d3$}U(E&Y~^A%&b8M^s=@f+RehnF5+TalW| zC>*+*Bjs|}@8#m(R(hbh&`PgkEC6Pi-t3f*3L{g3aEFQ&#t%JWS*ERMSA}n5Y{zMr z=#lgBu2?Kx)kA9aNb~@c-u-ON6)zht5a9gQOU$w%=%=rEUXyE4v{S_hFk+JnRVJ7t zZ*NdEZ9hu}0g4$K4;zoztNz)f*o?ER{{0&|D>zfjX3J60PZ3nZ(7V^uIV^b++wPQ9 z&qHU|z0b94^FObRpcgJM_GM#o4HIH^bKdLE@1D$gFkv`0z!_gtPr1lcUBEFadVlk{ z4MKR6Zy={o(Wu-oF|yH3x%MKe_o1|tbO#JMwUY8+m~cZmQ3|N< zZjVdADo0VJJ3{-?mBqRem2BhMq=&jP@wUY-d;3n;D(x-ax4E6wX3b&yVp~@ZRkSiC zBX3JwAU_qf**9UhlG-eRTzE}#_XtFX{lj;Q_fRY%N-OVQ09}643s%*I=Gk^O!9LS1 zi&q*vCA8|elx}L8o2M|LV@7SUeyL_VEEzOwj}jNooR*jHY7s~fk@fM>w2vsEZaF@0 z`HP&6bYEDsaUon7@;HXFQ@kXof69*6Zi$k%DP84GTy?Czg9Fo{?X{WxP}l5?)q$Q@ z59aR0TJBko-t2Z+iX3g;GV-2fn$vKTW6tO~An&4YBMHkrBeKEm4TmO|92Rv#6+;~{ zwJz2x_O*uSWSWjB!S~rC$)VQ83+e@8l?Y#j+zrtW%l9ZI-a0H2hFK-rM}d(%2^j*l z4gAxZUnnFV>)Oi>3f7ugzHRBer#&cld?w3fO~Ogs-zu)LH*Wn-Nx{hM#_7%cl+qjy zvKjwVQRCv2pLv15_v%|m%WP)@_|)MVQ4xk&s=$0m&?ROG_nj9M5P9WLtP>Ota-3XxIx&nMwu(r7Z$;l+_cz~dT-+h|D&Oy* zc4Qv~aCID1bOsc9degptJ1D_G6=L&23* zJs9oM=W6_8-Zs5MZHIfpoKjy!Cw@plXM>a6gUjM}5;08q=eAnhlT#1Vwd|dMxP_R@ zmc4X>IPXwDx?>X`i7t6p?^6Q@?0?i_+~1bNgQG+o@S3MYORUVsGxzOI?`}@tdD?HA2&$$ihvBv83O$bK=v`{@0zbdLcRA; zNa(?OSPrL&Z=b6nWYo$bA5FxswufD?M7(f=eJgW6yZbW>IRV$3<@q6iO!T?0#*)dT 
zUR9x#AxY){{qp>F&5c+5w8Wx~Qhf(G?H}i)_zfiXpPM-k?&*_r zr(K~dpUO&K+3z|Vs_iOAwTvB*T|b&_vA(q2T}s+ySk2#S*f@tDds}s17xnhpu`gd1 zOvc4c96DNNp|7^;mXpDwseTce&CBB083tC)%9s(W(y_)vlU{hP&LSF{m+0lG$41hQ}HH;Wp{N)`At}dS-o083^hlD zX5o&v`9PMaZGFSxE##$6M@>rwp|bUEW-{NXVtXUAl1b^}*!J(@O#+UnKRyjYVr~M)hr^@;2sWaroe1p^FCA1Ks$7|By zRk2|q1#qyePS>W(rQi4;K3A$Zbr>rp1yWNRQ+8|DR8^M8MEFaH4y=T`E0iNr=sm`; zY~R=ZIVG3jgp4|2JDR)&`kb#v9}6wfO8naVzOd~*Eqic0QpdTPhy)%|xW91(0&EXM zxkK-=_u~cL)-T|`ER+X){os2krn@p#O?k0@h=s?Ca_pgBwkvPH-T)?Te7g>}KZvKN z-(OH~Yb~{7N!^;;grNmXyU(|x&4&e|f4xdr&PkMnh3a)c#~s~MHR9HzBbOU`uT_T> za^|PZ>S%O38dK76G!!DXZ^$!d%y?F~m6@NXWL>nNXCtLS0CEZee{RgEN>-ypLJW73sZcKTrFg7(9ivr*^%0D@_uC1M;T$x%wE*3 z<514eIRh=x-(p&udeACRdfaa&h{Zetwbq*K)Uc-DY+11msuhyEIL_+dE*@$CT^vn& zCmh?BD$&;JzPh|66t?uTkatvH2bPb){#@4EM{Znk8?aL`Wf$MysvqlY*G}Yc@=+|K zh~JbKFC~cF-B_795Z@|2gQL*x+O*y8@hu7hB8#OWB1PDO3H~`gge{V{jQ2BxLxFi} z0G^pNarr=A$;@OaLbDQ1A1jONizZywYvhlAhM|)^sDs=R@h+p=gfql2q49 z>AKYBQn{6$@i90mN1Ex!_@-1f>YTP^FO1Hs4?Oy#uYB7GG~|J80^hV&Fo%G%0ZL`% zp!x=*+ow{vm3!#ca?UjZr&Ry4fqK8PI?5}=)xu5Js(nc$BOzF^UEB9MHD(LmRdgEl zc3C`Nf|%}CtLdGFCy9W4fEj~u^cv8!AK_fBp3y=ebVlT;t&LHV&Sv7Irq1W% zkWS3Jb-=YE54gx7=LZ4O2 z91qt3D0GUqxfST$o%G?)r=Mv6cM_s~Hyg9yeLp+Hh6LDKawd~+nz5eNk^tMb|3{ua z2XG+ZX^!CYayZz@$yjFi{vaZ;d=8(OXp8)yCvtKg^^=(oZE^u?kLGp(&8b()ndh=} zS(M_*?dt140S?nfnh4-3y6uNy3}5+4i}Gtj|({S5RQ1gI)08+fE5I&#NIm5>0r2~SVMxIAwsgTyj&Ls@OXguKrvfjj)r1a+csmNBdY%8%KZA~)b^QRboNh>I!9cd1+a#`&v~OetYHx8wf3D^G z`V^s6zKaZ)4u-4p@&lR(lFXG7XO<@OPl@=&7ewk#uKa*w3Q$m*S~}KNE%ORH4rDr4 zg@`g7&ot!c|B^|as|9^@Q7R6jEKj5DLbj+yshh{g~C>r3S%GkOaMYttcIg zjOyJTocUo7)&}y>Xsw*9R{^_I-v&H|-&MCXpjh>y#%ptZ1_96wCvk6?vI#i86`u_tlb)J+=xe8^GCq?3s3pm z?l3W&Lc6?v)qL%kjID6PeyTJO%X)XIVgD_ylua%7;a#FRgWz`@SS|2KS&a#OI1DKfG4k> zONOZC!ve||uFSW=gobMChlKSz3sZh-&AmL~=}S4s0wfRDrh=FiE--yk09ljA+JnGb z`TCYsJqhXv87cm{eS=E{ z%kS0F0^cH?)h+tLI|78AjQqo2nQ5DL;={*sr;zB5_?xyR&72=)F0ae|@gVbnBnYED z8ldOk*soh5h%4x_x50v-j`aKMWWvn#crboZ&3_*lkuc(rFE>Jxv&nelDCIbGSQm&9 z-#r(ugisX(=B|mQNUm8Y0m 
zGa;NWS-PJS0|N~xQ)b(%m_|SBI62KG?S}D{u4o?|0OQef7QiUCNU%lWYjvfG^jd$`cm5c z=d~!}t-svRtRNF&h`;^+S0C#C>hXZ?{=ajYIO+bknsrAN7(o4%YBFgiStKFBX?&Sb zM?ezXwpR&zCV)}BIIz5&r#0BHKMev}(!Tcvh{uQR6f%=r(hJx7DN}L3kwnw@SCILO z%xI;9tLs?h;eJo*SYzWtC3Pa1G)2IIfCo_!Ll}i4_SY9LGKU*->gQ0QVWmArToUkU zK=RE&se^2J$vhJh{D^qMDSbJ5me~>{6Jr{_dIviCB=jQLE;#b79=bEBH>;)YDP$C}%IN-_@ z)-&I7b*{N_CXM)Uo=q|@C4PjOOz|S=$OaPej^HTQHDtAmOp4WnmmUe!4IZgD4%wQxHRV3M zCdB@48)%Q(Q1fpa@(bPANR(l&e?R$HEq2%0v6g#jINSd&@%kDnL=jO4Bv4jmih)X8 z1QWkn?)21w_^0$Ei42P92PwT`i>jbRNO zZqnKEi*07Bvaloz3t{3f&=1)vW%o0Y8b=`RKY&x8D<9XKsd$M|+^ZZNFL(Go_YNBO zl+>0aqQeW0gT~PaN*6GN6(d@09`@UjZfkQ?O7<_I*`gPKl5(%1wTnqbIY|I0a2n|o zetuWKasH|BMIq8zN0KIAhUI$s?uHcYqmuHeNUZbEN8PxSl>3=3Ms9eUN0VM)QBD}u zOl*P7QMpQgr^^*!rs}V7DoQ?z(EzRu1ma144u2yz6*|Vv8%t+YNq1Ty7ubXR2|b%> zzCI5GGykIzEk|d}1@oDy+{Sb)4QL)GwcLi8Jj}nvGDg8Cs0O{@+9=KAr3+c4{&>Pl z?vGd?X8;<3_meMFJZi)Yd0vBjV4Aj<#h;l3MTOERxxzNb*prbuRC>A z^t89)jL?xH(e5yj_QiTPqrv>Y`%vcRj`x1#QTIz?;`G15%@A9tJHBD{E- zY$Zlr>xh5ze^5b|a2>4sm-crILXjXczT-q#i4e(nlbH!Pbx#RfPK0d&eys7Z#pGa) za6s$Vw>wOZx`Z$GsaAoyIk~M>q*yGvEIYZ+$pp27gMlA2wCZ{d2N}edQDzhByBntm z^{oLkJYsX2$Y_OHvLqu(pDQ1@eE&W1MdW`{>G6NbF(OY6VspUyYj01~S(15rq)0qb zzA%#@;y~n}fk7?ShvMM`7f*8Q^GNSu!z{*Q!fOfR(bfhO#@gD*riG8|zv?rCF?#p89TQt$pgVgbn9pP}ZVu zp#^oC_=7k85~U~J@!fheCY~lU^XSsr=cjk5+Hh~aeHR^m#ME*S^HJ_nK$bRYPw8)X ztkpVoar=!-nU^M67T=uy`sjMpm5Gc=Sz}S%V(_Q{OGf5y0~oww$N*M|#q)r9@hG^L zv;>o;$c>w_Bx6nb#XsN-aSM8MT)9PFe@MUnPoGqp2MEXDD5eBZtF;zT#fKB_U0V9V z$J=tI#}8_NZY0n)jx_r1Z#j%rIyu#ShPd@n5zoMJeggYMnVyen_$KD;oH3r3)`&Xx`STqegf-@l4$4xhC}fe8XK5{<;SsH$ zo@eU3^$8pV^+=s)jQokun{I6cv~K*?%*L|jt#(;rG`MP+#RP=i|9p?8yW2WiHVDAiz0xcAW=xD@zYu_ zH*$yM;rFz(8u6P0+7?fiy{Esu+P3a#I9NxQ&CEz2KKB$8Ru6yCnn9rPi-2o>V2g{O zHhsax1FZ!ja6Y&9VnzaWULE5*gA3@5x3L2$_OS_2*1&gTyPCrjivq=gP}{e*y~k#N zX-J3vAev@e{YbN?E6xOO>5BUvZGvC_9^KLsMoUYJ2s*BCc`{F{EizSnu<*t={l%~+ z@~@_5IzVNYnGR;ZgGT&t8*>}W%$T~O|8#>-lC@}}#sS6gWGOvwt-X7=_35IxA-pq@uw-o_l_4|6C4Xgx_En0nQcz5t#;X8SDh z1$p#+U;ya{3YX3#mYAm8_l`9*_F1=Zt5^ZG(%#qh_4(~4Y+A@xu-^x#xKR;%{r!7= 
zje>@PSF@d1UuzfJP@rK&)Btx#_y^)Y(4wjfeChFeOXXd`X)%x%l5k(Q7|R0dX*!&I zzwY^Yb-$qOE*y<`WrrGvz5E5>&$kHoGyAw6?kO^J^Ag(pxOm$x2(ImT zA+^An=~N`!@PX=l&>)<-hQ2H;(t-KdN(|Vx`>V;_$b3OGBco;yf&7cV6s3XAD9_2f zYPY4POHLTuI z(i5ke0}y%%@kLOF|F{S%@gg@g)#1LoGyOS(@J3M0#iq*h<47{8ERW$!r?$89*uBjk|uuUR2^IuMy0(rDvCCMTH>ZC+KcJn4=6KJaR3 zD2REycF-N@E)Wm#JER+nEp-jkDJT+VOcWHycqCNA(pv?svvYbYxSW}ppi6+2IoY(#fAtV48VcHsfAQ*- zD-M9}86{s@*e@qbMcdPvdE3tQsH>(*H;%tl_`5_DK%?bt?Wycxu*|?0kPPHg?S#NU zK=!yE`U`QG@Y)R}sjzi_d33Xhd3AJ9H!7OfI#!zA-*@LPub=163*5^=6G4LbuHa=K zt4%#M1Ky$66+QEyu8&maktNIIZev+G#5Fw|Iwon#TlprP+x1o?e`~xQ9M1};W_DsQ zFMmg;)B3&U$5l$p4ro+?rjTt{Y{t#4l z!5?nMvC==PDlRTBFV-k$NbX^{){#+SLo025OyKHB#-EbTA>E-LY=bx)GoDW}b)r}7 zs#Io*{P{)nyG4DK2t5h6#gP)NoT<64WDu;~uoA8^3plCGYhAA#%jc``1JNiOYugd8 z2VJnAbhlju^`Md#8`EPPL&Pt`HN97tx|*&FaoZXXzz5#=BfeMWWz=4;U^8SbGvz_c zA1*BT?N==<=%>dkzW}X=D9Jt0uEY+BgA(`^FqO`#!h19zs7ZK%=fqvMo1$cFz-Ig5chgAFV~fgwumIRswfzuhfC1ljj@%DOQa3on`0JEaU}{9>J%(l6bm z7O5;Y*8Zku&t=can_LxgIjkKRlbV-nW||t0+(X1=WlH_Jtm)fNQCZXGj{)^Zc#xpA z?{=M_^}3r> z`@kQxzQHWvo0obGxe<@{^q4`nnleyJdQI0TNlpmJNNFQV|j5o7s-A9Mi0~63*05fm5;i?vGKiP6_p&b!!i* z1VF{F=LTJPtOWUH_9y4-ZqaLrn@TRqRNAA{ZqmQdN}J?LKFPf(!(c9~!U0=O;3eR< z6L_ok7naVJmA@%?U~^tG@cV88qt0wpmKU9w@L&FJnFj7=F?1DHuW_`>iCF5dr3K%e zg<OIW~ z9?ok&fmUZ_<*C)8MP-R4<12`VC#YL*S?+Fmm*8~TWUWs0hDX80g{0DWA2M3!+0=;3 z{g(Bps_hsK&0W#lsN3Y3!Roi|>*lQ-4%Ck6j6cbEr|?mJ&RSxeGrnnh+h()xN*Vp* zS)q-PxrD{C+QHC`uczJWm4nAc*E@7~PlaqST&pY{lXB;@`1E+maoex}zW8PIZn589 z|N6@S&Nx4oIYWc;_8s@7`;As70$Qpp@W{2KDks8z<3t1IKzPRMTY<0b$p?mPwAL(( zeww<=qKm76ld}^dk%v)!_3kU6=+d9MyHGKDeG^}V4rY3;mzf)3f|Ob;a~Mns$v-qeau@QFWuGJagK zC!YJE;2!c~fy0OW4ihG^E2epa1gG+L`x|-#0o^XnG(OXcysl(lDHT4shzl?{96tA^ zQsZ-BdaEcr`lR4~1G+Oh6-Z!+JEyUrF}3UB_9%y_8WXnZ+`@|E*1JTO=0)1ya(AbIWM?hoh=}-JKfEE=?|WL1 z_{*+94pRor4_0qbr#@ZOp7f_kbDkDW7>-FEFVx!s>v{$5s% z3tkWjNKo?}=vRuU0|x$`^*2`K%?1tBpO9q1t0H<^X_I3&G{7;0J@N1zj4v+M_q?qL z`*DEky@1(6a5l{w;^PEmx|iwVw{5*@@m4zdAgY5@NfYdA{gK}JT%ioK_c<1s?61|jS34YVknUfs+0Wy*BB(T}-y@T0BC7>x$sfM}E%SH0fv+DAB@|v{ 
zbQia^NSpU!<{y&RhrMHqrCuAurZB9I4Vh*keMy$AK*P~5&1|O|gxsi{6ZLt^&sw7} zNcOIesS5;a$+nlpDRuQ_r1d-J(70Kw$rknF4yKp~UZHx$W0PmA$q&bhj=d_UKmuIg zb4t|=cmIP2y+%|Elb$m6Lp@vUotD$vJ>Fh_e)f3R+D(J%0z0EQfkj*MMD1jyU&7(w zONGxF%H6Je6+WMO@VLt&V667=oS18$kmlkBY%<>fjAIHsEt-99;kwE>B-Y3~pM^t^ z1>Hk4r+F2uO`Y_D9rGWq*IL#uJ#ZK>`3&c9Ftt41umk-^gV7Sg)JyNrpUN$gi;^5} zS-NOfc$3=A3JIs_@xQ6A@#E2qzFCiU1ltdR8II;2?f$i9MM!&2?PuZh9gRb0jFFqP zD)1!{m%&x6ZCv)1h-GYIGULBak(9v;)86Q(k?`tl1&?p<$z~`Wi-5T6?w=m-{4()z zJxwjRu#Q06sL~?CI(J;{GDxOd3k64>H7RGAYh;EH~ZyH_LPXY84_&N043& z-(i$(BS&Z4uucrjd*Z$(h&rab-*v;xZGbEn&{i9>A3l(ZYHNQ&ca8>d6_s6*bjdLs zDd03S$u_0FFfib~+qktA#aU3D{IUG@JKAI_HduEHUz)}_lJJE4^6bf*k`$X&?g5e0E+Ar9kX`dGQ%C=+aP2?) zyR%>>g(zOYD>SWDkk39aZvn zm4D{^qIco%&60N#CKuPuEp-|Q{-`+7ET4J#4;*;C2Bu%Qt9$)ggn?x>5lhJaLVou& zzX^L)+hG6j<2$+ha~JzJR#0Gq6YY7Wx_)xWx{$AnoW~mb-Q}#~1=pGXmb+c7#37g< z=)5wTrFP;I)>ODIf)d_HNEq?HSc;0FTNISRNWg&sko}xv882&Iw~5Z&-z!bCR>0oM zFwAN^%zh@`xUc{QGc7E@jCAOz6$ldZ;=ab;Z-Vq;Hqj+ouXN(6;|S&ra_nN9NrFA^ zm~HjOfgd)}<=Jc(T|p++4YO;1b}n9EV|+S6Kn0pLW8Erg;$l_F$ZrQ)YIVgbCh!NS zzhd1sWjTa4;?%}=XC*}{$lgup;~SK1miJXhG{VuzLwIuUclr~t+xU$b$pSLBQq8j+nE zd{VX@jeg#E$I>a2k$f<`T=(kJD8c(Nz-iQNc#)l;_MPg?x@zzAiR~#rj@YShowyni zVk{qCd|=Z{m5%w{Ql_0pFNoq)Qv-PlH8tsu@*2>GzixHyySCq+@)@m zoFaBA7qlXi`Zwt~k->kY<1<@_-;krvPzLgvIm=7Ijt=iVwc@})vQIjmU*pP}PQQuN zS3E52HEjOX2&TRNYSieKiAteC5&& zf{DFvnM|Qx`5_(mxwN|NA~h|;&M=Fpz(0Y~$qN>W>(wgIUUXjN3e-eZtP&<o4vK zH``wcrXgBc9Mh7PA8BZCjb)PO8$L|npAd}A<;D}_0g+Y%*xZ1#dH!L$?KzZpGDA+k z7PMcF7)9Zep(Ibr*Ak_cwdO<%0R`jaYkEY%%*P3FX<2xKS$mU5OFy62S@F7Me3 zGY)S`n*z;4gZ}nMgz?T^-}_9m@E79kgrh}F;!SV57ko8| z1T|&iy6Y$Aq0(JBMNyf8gb%pceNFq@8@hDG!|XU@^FY<)XM z@?QLGI&Fj7Hq|Qd`qMC!(-qBGlyv>@(T~OSbUfB5BVEwDfB@c&3A9vqJ92?3$4GYW z3lCRpDdB9ik=s?9^PoWV0GC`>MuEJ#jsWP+R`U0$F_g0+*u=>Sq}Vvzee%n zj;2EE*Eu_+&BFqz=I2MEHV9*f6Z~?kJC`ZehvtUIrQOIo8WnOr-Tj5(9l(W|M~A!f zu=K*DufD-ay1#$$@M|H5u9@EB{`7RO!{w|jO#SdEWpTTzG;>BKyzheG^t;Nc-@csL zalh8KqjZ+)6Z^=2zxvBJ{TaKZ*IeAu_99l<Mnf?RW_ZgWyt_+c`V2+mC)LU#eb}KInx@L 
z9z@(0{lMJF91tfa_Q)CUp+fhGKILl^#r)CM;^N{!#%vgqY)ea3`CpFC7TuuBtVQ=A zoh5}uX|-~GL3yX@xo;zy`9%Om8MrXkuc`N`9)T&;FqHC?El{4 zu??^#IGH}R8lF1#1jF-fQt2TbS^D#cX)BvWCs6y0|9OrqFW(>R^7 zEX2~sBrWH3KrfHQVkx8<1#XRv?Cxf%&CUWyAg&$rCcnzPgTwh&39W5RY1Q6b`_gOA zf&#)@6p>_bf^P8p(_qrO`VIyzM`!j)^XjyaMw!D@Oqu9_gr&}*6CntE&QH+L;tT=k z{H2QyU1$HKal9J}pd@4B0P{tTt*q^b^G$9ishwoYm)udWn4t2L7PmwX1*52EgPOyB zhvq-imT@jz*-Q42)XL9g8PF(65#UwfXEafS_NeY#Hh&71|fBd7A^Jb%`#T!-`#jacz&jovh~O3I7^rAS8m)PtP5WpV6- zk$8#k&z>^2Wvz+Rh`coPE(g%6shOUH%H^O3gZvI5lw@~Vd+aNCG(=l1)wqz5=1qX`i#*4Ad&_K|JVE0`bD|%=cK2Ftw5G|nUieDo**uiP;PD3gdOI^` z&Sgo;Z9VKzf5xID+WLM|OGtU<71N_gDxGP}^mBB8uFkz1yn`xV zT%#=~hvbV4VLy@|>^x&{t25uWw@IpE!+u9F&9h0gNC$K~3e(TK)tm}c`6$qX+I)Nr zR4hcrvcU??E{_e#LT$#k!{OHBWrauXhkG+K*k$*fkdU>>@ojr>-S}-_`v~vJt!;U# z+MOP514{&lBKg=okXi5%9V|!+j^}U~yp47o;w+50@%AF_R;9M9(3_uBbpme^Em5wV zb6bS8sef}>ve}|7=OUVdldA<|$bm&uoi3)iki*esUOdv2V@CCHcBrUTd&h!m-LMt~ zy{U3m+GZN>%aF?{%}og7Iyx?1l$XDF5p|$lQ*>)AbvKtOH|d^wc{OD-O}Fh0S0dy= z6Ds>joMXtMab%dbSM3gxyhew2{*IJUVUz`MB@WZ0D6o9v=|$c)%)MnWOOvuYIAxac z;ThM>hKDGouCm%>Xl_dRO4Qb}c%*U}BB15cVnixA><2a>{IjHTjM8O=j61aC=uqWq z%ZW(3$LqwzLnNK&2TYh_1mA=6wz)@k7~zo(#mQyPoVo&0VhBF>x6O3|X6tEoRg!j` z?ImB}OXPg^6|*ropdp`3U-F{+mr$P%AJ@f`;<#x6QM`<-l$GqXFml*=gIM|8n7 z=s7f`(p#3QhGen_<08($mIPRCkF9_NOGvyhN}**U_yeE$k)8Fa>2~u9#qB6(x#BAk z-bJW5$04IlR55p*{zWM03<4!u}6*KG^_Q$f3LNl^pk^nmgPla=~5b#ZKh zMJNwmi{tOsp6g|w@*Jd4iuALN{x>BFo(P?2^;GbczwJeta$l}qI+{gw+o5!7eoNg` zE@@bq#r0U(__ld>UL5iO$s#W%M1$E}#cMCO+VI|%To0MFhwdF!SYx_GaCFIz$rP7D zR#*}y*BmuvRw{aUekng@c6h4=sVdI8|K1^se@>By8Mn7+Bo z4wkdfF#`SIN0Txyc2w^oAmVb_`cAOew#gU-}bNt8R=u9wuDFdOx+@+6(K%v9f|qI(&__-qr&P5 zT#mGlNwsdD)deCABl9@=hCr?da0H!@AaHdq>;lJ>G9QlPa07BFuDMx0nv!aqiY1rkMPq+2R#HT0>mktpa%$_M8CrUvBT&ijPlW-3oE$n zw%7h2KAf+Vz_&@<&o?sCulA~0C^*8w3539TZ~T8e@#!faL#!CSb=_+R`~)i?WdOV! z1AHeY>~Dzpyo}68KUo<-NC3#cB0&u$lCGYlaMB=$Yt{;wwiiRa=(z~IsCmET=}&ow2? 
zU;h0*|A>n}*wb&DKO2xp-*fhg*GFWP;iB%_Tdx2|lCaxLgpImJanbo4X-Fu8umG>) z#!P4D+GJxR0ty^>f%|;JE{lXm6cf8hG(A$$XQ;vN?d~oYoYYC$M1#U{z}XN5zT4-x z^RI(GR0%<&qSq(<&8lhnDXHuxMu|g=$GL!~C+;{ScrV|dv7QGbu!btOn@$kfa3422 zX8cwyjssGNH~zqFyxDSWgxec;SYU)-D(2Rz-OF7qg49m~`hkf(SDnv@3k@*zV{=vh zN>YcF-riJ-s{LYCi_l!3yvK zPa_a`&Eg%*%-wNcW7-V)K>gKR+s5oh@oN7m$X2n{2xxWW!U^|3wLnprOa0P<3`sen z2_jGd`;}25&7|Bw=G*2%AINIa(~0c;j+G7lO_h;S2D)*%uh$%Td9ey6z>j2NzE-;0 zn=^Y+{RHE05a6}Nx%ZU7G&4X^zIewzBaYNzV#2Q<7F76F26EGJPcf7%lS`1HP>f$4whT7?tMn!j3Jr6z?-v_;3%1k zMj4a8j!i!WdC2sj3edX4S&Tf$#F+k=dEqf=9L_rC`sdn;ryTAg6u&Ix<;!h3IZF+y zrYJ|g1Jd>p?$4hF-`Ux7q%HU}8}7n}o)_!Z0=mvJshN1qE?OJ*Bx}XQB4F59zPxU= zXi!2DO#`m9H1OII|N7@Fw!_+PgF5$xg{d0CoqzF0`lN%#hp zuq>X~F2*QrYst%y&*@M6w%5g+G69iSL)y~BW#HF&*+57|nf!=5 zUf!+<&ay@>WGc1%V4=PNUT_Gi^u6^oMBJMf1eM1Gv&;T(TC^#VqV%{MoS%VAu+rZ8 zYbyYS+MKtrC=Qi^=$~x_TVz-?#p`l}MwMj+le+#sP63Vg1K8EH^sP&2x@E+)8IljNYs}K<7x5+ zF2%o-KR2-TEasmh)zqXHzPFcSW%D1;c*qaME=}m;LAIFCW3-wej*zXo!ooA>T3Rr_ zZ0*@~$J3TC7t(Dak+KYVxlJTpVO>04p{+KkOW-NuP=FBl=uT7?S9{DWiH1vYY>(0d zCPN|(*&y|)5L7D@GzHR%4X~+YVNs6%78f)<&P53CA%{;vd_6cbR;pK?Z_shF_)Rc+ z$Y~37(ha3TkAVy*P^AQtaaFGHrmUzM;c%R3X~*l1V$0G(^!IT=`-DFcM-VhiG$$wM zpEI|0fL{7q`?g%JL<{HC;#rv4AOx0M8tWDxt zKM?C9UlX1*(xf>EUE|=Mt_(l2GPSWr1c86+n{&@Lo$VF5uG6{Cco?c?S_*$CcE_!_O2 z2&|-PZ$U-a;b520o^5@!&~rb8Zpi3yw|KRewT-?9z02wi8lClJwesZLVAgpnbvS=I z)croSY0bC;c1RovD~H}~f+ENGe>D1$(t{z*MGi+^xZ%zXx90iXF{_l6f*O~D-7u!s z(0MU1;Q7Oc%1VEKUJGXCna9P&^F2M?W$uG2JD5<5I`X0RTmcpp{-RLR<@OM|Cr^Z2 znldI_+;HYHk+ie1S70jZW%{0vJ{L``8vO11_zYyL%8p=60oBrmD&>_CNKEe3dEs(; zYV~nT#ek{qi(Wx30%0)uN$Z<&;AkAPzmx+Ip1s$?st-9rPN`iAVm`i0D<1c5J}JLv zfkQ+iygcnU_OLbXva&mCoG_)hw_m@?N1K~7-pRL=Op;UJ3JPAG7ULVvfAHpMwCYmX|^<`7B6oE7G5xmbHiFxBN2Occ4RB2~H&LxBOpd3mg~dt6#``yJnyHJ>{F6lmChMkAnzN0T&_s|y>fYVLg!&ipo#z8&nx9mJ>G$7SXRbQneaDk?u^Noi^d1D*1cP< zmflIW=H6M%06o#XbO);fo$$Edt~RSsq^e<34XyQqb`Izc%RFZq*E6rt%`{OTPajmQ=%E#m`cN$^0E~Fs4(~cC&UA8+RF{ggo!P3wqo~2|$XL%v!5HR-;az6$r zi$_S(m?Gm*^+Tm->cN&C*RGYxV4mxuE-Ov`Cav@$-@d`NGW8mFZ2hk(9UEXW-|Cg# 
zBWdXK;1I86y^T2jDQ!cq*??tgUR`e2;=_OsFA2aDQZQp**r{M#f#ocKQjCI>9!a- z`K_+*y-uWojo@F90JF}QT6X)9l~8ctf{-ZN@OXqMSMQUhM)7 z=d=}8D{6TA8O4*M;L8jqFk>6GUkuGwC!Wych3r7y`$Ut0m3STZcCXA4>BXo)37_d~ z^&Fk-st-H#*tED+>a@J%Sn0LYP_)yd@rud5Y<7U7ZoFs$Hg4`>72CZZAg}~HzNu=L zyk}te#!k$aBCpqki?|{o_ze}pCe;Oe_%8lX`aF95$jNTj*Ip5+n|3Am1q1$JxM_KO zcFG|i3O+|gyHu;hsyijOx|g{i$TilzwA9K_kPl2KU-KxFo>s@^n};|YrKd|*^c_!` zNc7%pELY5_`)BS8>}3&7lLpfNk|c4&U5jw0iAI+(9|t9AKk!LLN0NGsxbrJhpa)Qs zRrPIbb>DmH>|ErieEX=sK@GovPKT@G#Tk}YCnk50*gGFX`p~~69 zG;$|clO{ZAQZPpc4S&Up4XDCeb#(P>(Yp8MGgI+NMrzh2aB$G}71UVa7@ui|A3p7K z&DzA@*~|R(3x4F`S`o6T!!^Y}uAE->T?K#GuRymzp#hUj3SQ*R?pzwN6C}Llo?czq z1P9^O$IM7>9gP5;+y!yk@raW5CV{{vZ~5_gWe{YX>ElP~Gt(=Fj+36#n811J5ac~TRMZt9Z&2KU_W`5>aE9UIr+ipoMV1tp~SZo*DU#T{$i%d)08^* z5drtbp#H!X`N<_^b8{|(z5Tnt1h&nK+M0goO|GlL_jj~3*QV_d*RwvmWr|U2e8l6^ zdC$BcNi-rpE%uZT2QMTZE<$)`A45VwV}1=XVB zR1wo6F}1s++sT3b+0`Z#)yF=?DzNEde%qVu`v)57gqtdf)$F;b+j zL)5R4`VKhcEU(uJQoG+j&!{HJXC%30UYM!^<@(&PXke05QVkEXSMikn7P+dN%(8eO z+zR^Y{$d(3d!hi7_nfx;T!g3BxYErZPvf&Yc}d(GFb?$$ymy8gEb&PjN;8meDLNZw+LDY^87=uOd^v8iJJ!B zm4ztG#Yw?39ajGHJF6NrE+hPAG|BWWA7rtsY}y6*oE~44uLkbsa)l3fou3fb_vKLx zw>yWu3LSaMq$kz4hcTSxRruwc%^;BlvBDTK`HLHED=WCo;^K|suMaMnuj}5O_%&7< z%fGy;Y&)gj5kp}TY0h_G11-*Tir}ug^uxybdFqi&r0*WMTlJU3?Pxo})*oSq+{_^h zV&)J`CuDFZ1e`n?tV@oGT0!wY!IsT zeA_tXnK5pSO)UT9Qm5Z>Ue4GPZW9?DJFAbH{9O2HeA4VA+rV9Cj!aYjyv#2J?;c3S!v9y;}z-0_}9w>-78lUd3ryzT=y)rL+ zvnTHIJ}9ypX_E_28!I%i%-mI)*ds}4ws_%Mf#?1FgHl(6t7eJ%;k;TCe_fwKWMDmW zhZ@`~y9f52O$|P&Yn~}e*E{(k_yE&+YocRnx?}(#L9XC}OVTrTW0v7?HucUbx=DePG=7^`yPm5JDrwfYSKW8aEvDTvvJ~!! 
zv+TW6UySX(1PZo0@{+;cIDjKDFo$H{Khyh|7ev*d7RCB`U3tlW?f=mF~lP z8eEIZ71V9D<`ecqYX+%@dGMuT`dlPk%jD}~)Z&+RFv*9RC2)$LYXwodVYICl@-*2i zyV3V)N1pMUrOLu{HTP;~Kl*uZ7ytUT;^e~dszf7tD}RW$!D)xOn09_B`Y>%zGxY?& zND$fI2F=}R=&BHOj)Q-u_7e6EHZ6KYkE&Ny#f4xwrBMWpeO&$U41bCCSOXAMv zkg8^Ip9;WzTGsU3)%Jobdb2CPn)06ck#dkZOP{!=v{qqKM02PP$G%8HdcN&57phAH zwc&1?>r~Xp>2gFR+ddWWGOQ|gvX$ul$IJhcJ(S5)9ktxpbuZ1SoQ zrIz8}3Vw<6dJLF-J(Z|sS$Uo3j5u)Qt%GQMYzbx{hmhD@5E=5Dv=?hdtOLT~8p0;} z8*`JV(nclKP&joHoQO2AWVL}{#;q7Xx4Ok3qOEEAJSJwe$jmr-p|k0)*Ci-yt(OZ^ z_KK;*_GwY?aw9&irk1srHs+4!zNMm7n?`*jq`QLreAg8~rlo*sT3 z$r>w7K5_*WzChRNaFu6mw$@r^Z_6BcmfZKY4saRDCS=Ka1vi~tZPzwtD=Qc5Osr`R zH(|UoC1&&8NlEcSW=TGH;IwssJ0l4;j0U1=PC__=(HMNpDF`eqH@LnkO>C0Ew=Wr9 zXxZmNv#;aExYPTTaVPUFpYIEktru=+8@}n8>`(q*R*nKX7FQfGp{8csyCdv~Q&GX0 zd%jWfk4;GX$)Y9qe^fS>v^(5gUE7lCL339SOl?o~FmIXj%U7cXLO zr+-Osoa<4Szx*KvF)(9%Ywah9&n>v`%qV3ktwkI6E;L|_-zHUe$!veXq6a|& zJ$SF=W<*wSUkJ3bhLtIO@=w~07~u=d(lu922j4a7dtFKyySF?$TTn+oP_dyHu^Mgs zr94wPu=>2Sol*q1W2D6urS%(3e#I8$c4F>jmcLkJSZO=XI--$3enhfwxQEN%z?lRo z^S(Cl418^WCFOT(MSfn&6g7%DaRis%!3%Cwi9Yh{sat;m=h2tOXKe{f1G!}eGUL5w z_B5KJU#ZVb1fyHtwBB*j>ih9T(D7l*g_aKY;n<|HeY{OEb#&K#fglk% z32W9!gMQGY%YLIJcvmz)S5RobTd4Ws_QoW1Ya!N-USw>$?@|>8Z)y`!RNJTW8fBCm z)neR8^z2*K?N^-NZ%K27Pa&u8KX@Q4{GcD?&#T%Ah6$M2)4Y(wCW50tz%SoAJBHrS zxJh(=&^_%+o|#XahBEK{eAkStuV@wfPDD%Vc zuPFg5E!u@T8XV42iXnf6wz&p%qG}P#wLMd+_d!$<9Z~&20U8EgkbVZqA7E0Swx2sYYtMwjE=A zRlM#-T;aJ_0e?@o4Lmm_HK9$xtY?iHcF7fSZu}j^Es@6$3{8-_*=@eRRODxcrD|%3 zFbQ|9D`qZ`+ZdJd%(FXyzT#_|5F~&gPw~fLK_Wcn)Op}GM}TDw+tfUXD(jGs&7R;j zIOO`X(p*0`*B%}oZo|M(BTK58iZm84X~6J?+hi2f@CojXS!qm_R~KX!O+InylP>#+ zPeH537oM~6|9hHE-e@E4AGG;vp{KH^NbFSU1OXy}GLb86J?_gMiSlZ)8re}HOY7`Yb0fBDRji`7dyNZ#!NlMb?Ac)Kv30ck&SgnTBS`8V3D2dDR5(C6UI!2TlA09_}l= zok#_yf!7L)8P!G(5lsWS@)V!Ux-J*Y2cw)}iMoaOse*Q1(yUF?^J{yiTH>lH>uFek zSuVwfyn6c(m(C9Swg@1Nz=4=DCb?y14iO9k;B7Af^rcltcSp8qMN3+0DfrO|3y6CC zNo-KtoG~VBtcOjd%j)m0y1RDD_PR+2^oZ+AVs5bS4jNH+i>Usf(n8qenP}ao4^cG7>@md+A6b-oPsg~nMAdJy9U5uWp-Z@X8}8W2xw*BCG3;5(Z+U! 
zAk&=OXdb|2RS$W|)$CBI6A!a=!JW8#xEXzpRsYg;dj?gzKEjRos$OPezV5y@_u~=X zuCFGDC+&-k8fyPm#}W6TM2A_;p8JZ#k^g$pXkzvP5F}#C)w+C2Yh715a_T2Bc`~yj zpjB8+Hg49^!`EX#8>{+Zs>?IxM+S*gH@^Y!B~zb*Cv`%4^*SNy02m<$I=~C$VHe~g ze;KG`ADPdUE7H)=(9_er$`SU>MK-Ite0#9(ZYsl3w^?LBL@B>|Pnc>Vw_ZqRTi0Fd zxU|75)YXR*aYL`iuF?piAc0e9Cwr___+7--yO!VCYX{yIXeCZ0Ln)yi820Jgu z-<@_|e2E{eY~cd{OYJX@4WuG5n5JyYVl%5)sHj;h``k9Fr^BRxNHuU zrl^-Znok`gz8E;Y8X>^nHr&?NCj*%8o!SfiH#w6whXkbdBEFqk$iEJDb_!%R7Tyu@ zXLfk!k}YzJZKnU>$4Q>VxHw_ABG(ut-fRW(re<*$WD!y%dNteONORn5E_SgYKppjy z)xF24d`&i}C)p!>NBg-B`~2H8e91S2f#Sg~kNV;%b)F!p_ETD1$_oY%Q8`74U+8cd zmDP0Pe;&DA)a-M2PFT(%FTAI%&KJnlTks!-2^f6t2WLJ;$^s=XB+GLEs8%ywTW~+cc{?GgD6pB@1rIGw5T0)p#q-2` zUI0l0FE15$cc&9{G@inZs>mN@Qm>Bwl8ET9{K-A`BFA9JZw zjh+Csva6hZD$Ew^ zOt`c#|1Fj^CUO~1_=|gEKR7=0Y!Gt*o1xK0s6O`D;D@qIHOX3{Xr;;TB@r=U%>Q38B@ zjYpH)?a~fne{GrbslM@$$P0r4ww=dm-aOCM0n~`5kr+!8wYV%vT2|<&I(2P1=4=p@ z!${r5=+Vooy7{$Lj)#KViMBjxq68o}aA}j^!%W(G_LZv}J!s!{R>3Gud0vH$1p61O zBn%ZGWn5Vo*1l+Kdbpg#EGmFx6>1emH=TeXy zKw{^cE5!lCV~5}Ivz{ zE~XhVKcoeE8exYHAY9C6H={2&N8Fh@i6e5NQ$^8@3&aewyVcBtB{#2KdN33d@9OMa z<6K0(@+6P>DY3TlAqk z^{_LPDTgxNF{_S9_aB1Ku3_CwOycKe}ba{2OZyAnINjnbw#0wUh!ItwK zyO`>Ifi+f>KwmCK(m)nSW2f+IM*KC0{U6p3H|43uxa0@k^Ifoc|FP*Ob9zevm%qvA z4d0*CTk~*oD(VehvD6R=ZI-OOj&t756-4LppjRyZ%V5R=wFxzyT3S~`DxzQKj(i4X zn%bA(#Yp;GQTst6F7Rf1WAs9Ht^2(cI_X%XXhL_yDc0r4s8}V^uS!ps1`13_1<9~U zUoTH)Z&5o(XKcERu-91e8aqXZkS<14XSQ}2M$S>)P8JKQ)6@y*FkcMR>i8JnoMRTP zFebusJC^Fs(a3K*IeCe26CVs;|AFNlDUne6qV}EDe5V$N#RyXq51=j*sj$yF6f^ul zZ3Py%-U&25*rmv+QW|pz%$@|Y*A4)0+t&z4ed4F~f?XPDc@y1Zl!UU2rJg762WkgJ zr3B%}k9BrLyir28a*hgm#JsmxHhU0=yJ~`W7o}VV{8pO93#Qp0^SS)2*A?jz#s6$* z^m`SD`>e$IEaQ&!Udp*;DHq4g6%{H)4JUd1ORZmfl-!z_Y~U2CBMhAU@$?Vkzc?iu zwbBpXjzwG59F>Qd`8y#87;3DbJu=(W%cJSVdYE!uXUK!t5u$vgVgAuTnQJ zU3I?qn#`V0mEGn7A0Ns2$yM(nH+wUWm?O4{%B(hGleUlg#X&R=U7j3HjhACRE2y57nJfwtJ`B zE`5O+Z08s3M$F>DSPIj?vNRVl%Ojc+dXA8XWd@&^UNaXwjcZSlo%R4YGJfH%gO((P|FSuHCO* zkY{n>2gwMY>WhKOHnz4ef2}{EWikSDPL2r=PRL6T@j3!A 
zvXmV;sU0B%m~f*p+AY*K_BKNOK3<`}$sp!>-|GtL(TYW5BkpSX5*R{(X(2N7T6@Yu zV;&Gq@En5})b{n#MCI|q>RZ48P@g62r%MU8KRqk^I7orE3X0pKBv3R7d?=rW;|6m;hI?Dm z()7xsqKcwbEqXwcUJ*jh4J7jckFQo|Vj1PaO49KUne2k)JNk4tSd5p3=XjQOa%Ms~ zx4XRRMA`LU_BISWW{gHAWun%Tz@s0|!#q0uynDCWt!4hw6%mEat^0>ybLxLy1)Fo( z|3k?MZ%K`sY-P8Fwxh$Y`d@6U>$|c%DgdJJ!KeDYwLM8ptks%xgg14`ZG5V)*lNC} z?!~@Tf#S&T7y~@S#RsD2mQujq0{rg20EltKQy5)&D2IvyZNjJloiGLT&;x(>Clo`A zJmVBdPf~>@CW$y1dZV;Fd1tiyKcRyQ3lH~OTV^cATc3TcC_{=~gMY(Q`mlKzL9l&| z;vgJ>P~V*PI;LRsjC08h)Oz7M*1ZogT5!)w%#ejm^j|&_*P~^?m}^n|-ZBbI6?@(2 z`2zGU7@qN0DrpyCbw;KN0fd`d-QZip0U$Z-F>@gn2C+?--)a}b;S4Ibtmu2&Nkg%z%| zAMXZ7G&2vPf*XGj(*dGF1S*sX3FO69f{+YL-8djH4siQ{IVajEc^^oj4`bLV5k$2; zkxB-V%K4%&^@lBnx?t@0tHqxU8ymjon1s0ysVbiPY-3;QJ_bnVY7f6gIDz}+1PAv^ z;>DBKpufoH_XoqT3eQ7{RgA|v1E$QOB6a2C%B+x!^>nR?EQi4yZ@Pt}a1-yi=oMts zBXxb{W~L#2IzNwuTL9bH6h|(+ay2>eE`EfAQE(Q3INEw zlw%W3pn$!y_jFq5#UFyVI4Li=OXT)q{%`$0!upRtKNgpOgYV+L{=4Bi3RrvhMDk3T-PgnGDopFLuJGya%Zn-1vC#Lpjopu|A~6s5Vl z!4qjG_kWJT2B8mG?PWhc=u{a(4c!?wNy^&Ynf7q*+E22KiMNX$Pt_o*J$BZJ#AV{u z8gp?=QY+lp>r1h#b=$HW++HzaikLs#nRZ`4+<~ss(}RDria9+fUqIm4frKitX^WgR zpZq|jqng@$HxXx=cBxEMBdZ0Ynh2110_pVM8?HD1OcYDCL)^}KLGDD|v`f~hrDUyRX1E;u zpWOnBI%yZZHv!`f4GFqL8X8n$@Y<(pfdXt31k!)pr!}#MNbZ~+bK;k7kEF%Ujk?!-Ji5li&m zvUJAwrMP+!$?OEeBy`>O*wujaiUC6hD7Qr8ZOkL!HAUo;_HCNHbUluAz$7sPgv&Z; z{rTpFwe|8ar(T(@&A#Q>7LgF%jB@|;^H*bOGgcr+`iQ+-FRo|Akd_Y2wE}eUn~%MO zy+!ALQ&Rr^P2`_{n(sWuM91fNvN^G!PbR83Lpi0}l%ahXPABy7qsz)C<+|E0O+x>$ z8$zWfyFdEh6a&p)H~!J}SS>><0S&9c-vg^dXf3>;O!SUGwK-37oVSv~xw^e2&;qLVM(B0LlvXLXvy=)- z34)Ivqvi&2M0Pr5Bxh9N37t6v8d@AJ0>?R0|9U2^wj}*h*`9fJp6Jn&4UTr&n_<;# zUTF;u?H&#&YiWrcmMnNsE*?0B?oq;D39Hr$Yu-U#MYF<`ZcY*j<0^iEl<)wl9Di#) z){!WI{PXdwcC5=3oa|ADOIM}Z|_L8wD5oEQ9V3W*5IH!U(IC@bDiajKdUiSYZ6*8Sz zb>I7lXY%GRdmJk!W{rT_IXDy@bmL=8>xjPk!F88|0wM`AcyYEWG$*gOC5Aa95x!E` z>g1;KyV=3scJ`6U13}T~gLw_8rAggzl#9(&waxhNuJ~%ndAPgB1=O45U{<3~$nubt zFRG?G9VGiQbb&Yha94kQHn;KbC-MqaN;)ZPB>1RIYHOP2zle}-$ZbEGJ(9^Et||_l zYy60jy?!bhT}d+gX@)~OQ>|9aUzt}FwZ?UM8)g%VZd^)&t9;z0f$Q$ADiy?FGK!}h 
zwI7_p3i&WE8=9U@FA^|fw|~<~jpMOYukv=B6aBYvk@7Bu==m&8YXy&QWOG!3 zPtg)A-gZt-oG;db`J^L5_U)2qeibwrQ6*3y&xb{XFxW>NFgeDWQX(TuJGE)E z+6$$w%Retnbt2Xu8!|4zN}>b1 z*OO~PM2+X^-jKaM&!RU_bTN2x=^RN?nT3@J zcbQO)G4@*eFuNuLuli$mE1igLI6S@!4qsp>k_grijZXf#zH9XEhP(WFhzj<;y=Gu{ zJ*>bfxx_vdrL}wqYPZcBFp}l9Eax&9&DqoP>_=i>F?tcTWew^M7ztL_`nhQF3r&}S zx$bFp!#v9{0gx7Y=~VEEb^YK+<4Ii;wq_mXm}(O=)*n@BBgnKokK1gP=XMShETep& zT6R8y3Up)1(jXONrJ7H*JY~+A@+(bV5&f`>uftzJ`qAM-AR!%Wcdcy2V*9^s%ndG z$XiOUt3rKDw&ik!dE>|EvA5~7Qttedr+wC+2jxiC(zkoMgm)^E)!K^dE>@Ji`UKa; zQrB$Sx7>v5`K?-nH7%fy1M-}1IlfnB>btTTD(m{(Q$lWwpYSGmGz5dQ#02Ube4I0j z=JbuC8qQ?e{1i(qoU}#cR@aLLPaDAh=K}&NK%lDl9Z_AjM6|W z&>({3WF{jbNuorFEjczA=qA%-XrhucHqhj7Pj$7w`+YO_u9>xF*4$a^zV(O8rn{<6 z?K%kzohOj(r`k+9F118`1fiHR!eGi^`qVEOZT zgTmm=2-(cBAQ#xa5epXhG2Oth!9G`7vB5jynYAW5OT88FFcCDVXtZnQvt@;n7VYi+ z#0B<_g6R0IkTSJKAO%rBkBgFE|D@ zss`0RetqoFb;A>|R?Va*Ot6I<+`YokSwSb2THIRJTMnjHpYh2Iz68^7kB1P*H877b zUHo7|n$)eY)2OAfM7LK?L30_`Ph8P{Aw-j=SM3PoDEEx|(eiOcJd$Rm_sKQ^QzfRK3ctuf4C5y~EJc!q+Vd6PH+Tpt#po z5Z+O6P?DL73IhRjRD7pDpC1>cVwd?`Pp#1S3ejh9VK@q&wAVJRUkl^X?>pbi-*_*) z`XrOrQI}P;eCgAv;tE*DFHLHl;u?rr8wT{QYr}kYesFn=&e7WeI;KOK&z&2$f(Yc? 
zLcPA`)(AC}UD~DbVb8Po(&HU<8(hUSuQPWsCF``IB~^4>=_l{6<(vdltFny#;n~m9 zs@-Cne(^cSBn?}E;vfgTfm5QrkjchGD9R%EgfIR1xKzK>sikidMMlX+dh=*GgITi{ zr`W51VBAaWhATwiQ1-0n!J;<{R#zAoC}9%I;K#f!3)nL48I$t?Jt3=7qM$+EnE3%s z++q)!@488~iDYd(v;z2!Rogm$N4qg)vM95*XUvb6IX%`DBOP!H5U`?q)$vn{>365S zwZ50^5L839u;%fsQxUK8CZ1Or7*8^MzOLDwMYSp?d1gE zy1>P@p&fY@8?V_{kO=4AEFk1P3%4)vG+(1F>K?OBwy3HCw+Y7bwyTDI6?5})y-_b{ zXt!MX(#%(p_S8#*=cua`YcC_BllxHmyNokVnzcD62|a<3CnXLsDkoiHb zouuN@@+>dguJXwARe92tKFF~S`W8baM;uM|kEd#-&z}=>dFrwK>${6YgxIH}tq%;R za=n_F*C2GiYhP%L>Dz&*sYjUy^wPi9Xk$Wh^0fwd!9+YO#o?c#sXp`m+2CyWP!v1v zi}s51sW685^6f8(*BygY*YumG=p_y5+=?a0v+pi6``%rs<^Lu9t6`8bkUedsrwK8W$ zvsWqPyViJ&7*4b~5(PO+8q^gj#}HcnP?q%GXIa49<22u$W8DKt2W-NmJT~kYNL@ zV~1*7?UxlAm|pfM;}vKi3GOJ8CD?vyj%(N|z2wKMd^15evXN>V7j?_i%`7QcMV|O6 zbXZ`a*n4khZODCZCy00B3-A5h91nzzO2zR%{EtV(#RQef7 zvx8{94FVK=m!9K&(x1r31p;Q%9`4P0fqdsMa<@hy#9-ERj`Jq&%g2DHu?z>Pj4|bH zREg`=ME5|M&8yBAP;CVd)S%@jy+K9+sTyiV+^(&nbG^2SGgjN@UxYk2KjR0P2!9m&pCk`IPs@q+)Dij3nOq}%z0zl?Q)rqa?XFap-&Z}J9*-HUt$T!EinF-6 z|5G}0KWM-MP*(t^ROD*Xz6cCew}@-ecUzrXDSp`7Ig5i0-ffV|gi9j)4ef(ldWpbdHBHj3@? 
zO0ie-?s%z>*vEIOa2+%ew8cnQPtZ9k8~8wV8=ds^?(HD{I`$F*p`gDcLDKN!py-_} zj_vY>99DQQ<_CK*#zppSiH%K1zb&+#*Odl;PlkjbO%}p^k_wgJ!_;fc>x{}uG!u-E z0>&Q&ccqtg(SKpzH_;#>N2wA(RitQ>W;JxGEeRlx@subTHdR@P4h!Q{S5w33bP{`t z?qi`kOA12g(M7S>h+@Rp3g^hk#UUsM+TqDn>3x1}jc<-m;!R60?ANIlR7fp9vfqC< zeM;2cQ|tT}21#w<_hOrQ#UW@_dwCH0*nAfsG()zs#UQfm`;X1ddvZfVx$)T_4!&m> zxLx@lxoyv@5!d!=g}b%$Y2{^PHXNYcM@jkWDGwFZ6;ADVf?@6Dx}$w(E(Q|aog8Mu z!$YJD^!*0>3H0Yi{q?>|!z*K*36ZIni9h5dho(c*A}-Ejs+p2C28pAfAa<&O>qkbG zH0LG@A#wV;By$&ecP85}TxUBzYG2{8*f%97ENj?y#gCMczccdc@F}*c$D(qm7u=bL zTUQzc%~>9V4+KGYelcqwqo_47a$~lrA1ef&W@i*Q?QVyYPbTcX4~ln5<#k8Rmfxqbsrc7{Zm`;|SyK_xlwUs=+#ngZ4(KhKS0C zfh&QE`Y#N4Z%dK%E5_C@`>wn)N0&oR2G~`Un2%z*G;p|+EG}*Ksqc&O8b)kiKheP} z9CcI{R(1>*W*m(;q7fLX@|s`akJ)Pp+-Tcc1MiKxzy}!%klAT+Hs4{sFUPPacbqMx zHGy%t#`n&Jo20*^fRcv5D8A&os+uz>Uu2Up-}DPQqIo2l5CzW-)DkPgt=z3uo2KYs zgZx9)@b_^OTQ4g0YW!7=8~g8uSF)}xN_+IK{;&!+iqigZhg?^pE$EAJGf#478#i0P>na$uY>ta_6-Tq%|#s%l;YaiM^4F;L+#teou8Mf z^}tjg`HRj>$~D_KGcCbNHCd6{j)Nv!|9n1^a@72FbDe2pPz)X)8*7vGn!hgpdu=oF zYZ{ms`6k*gaqK6!yh3QyoKhZ)(*cS-;qL!#M95s$PU8y@Mn6$d2}QoLq|jJ#t8@>j5jJajJP1CgN8inHv^~@8RS4UC2wsK#MB`5%b}b$&<<*IxzqAV+5ARPA zzK}O{*rpPb>s9pnEy^RzkIL5)Ae^H`3q0s&>-SSgOS&Rr$Loe(l1XK^I{q<0=Gzi5 zqrxG;TbS-wl(%k8M)V!CURT zWa)fFnvFC?`}s|wGhb8RX8k)W$Sl_n2mdC9oqpe|P!@e}Im!~&yzJ+vtLNonjJ)dX zG`lt;WUY=*Tl09|^R=j3YR=#|{dGZghQo%7K5r!SNCg;=->TOrZe%2gSsqm#&2daI zq-tZLEm__>2r-~X<;-VQQjDbdFU7{ks`L#>m)w4MRMU83DP$;~T7fJL-U`xT)IHZX zuY$~TrsU$wadt`EnMTwWcBg1JWh+)}fK~+Xi)YmiRD6z4C@~I9^W0gA9Nya;M@wxz z8_&Y`T|Z7GgZ6(yT#^ujHFLKB^|?jH#q*c_>It(w3=C+~#RYc?MhAvhc?6VvS9vZD z;Y_5jd_P4Fi+`OGCB@Y*3D($$PkMbZdupY;*)Z!6hD@ux^qYc{IVqn}mV)zwG)$S6 ziVDA6YU7@*<8zto^`or~mbVRmOd9{~r(KhUO|q<-cb+x&mph{Mpb9sIN}8Nr*qR_I zq14kaJz#@#n2H8JOwldREYKCa@O#eo#KdEyO&p8=e$?~4$}1s&4Lh>CrEvf2(Ss!~ z>Y*gvDMCKk`33%u#s7efCYy}G2wzHl>@O6fpKiFw95xDCUH6wD)wj=Ij!vldaP_LH zsMWL3muxuwTYS)SEqjS7PEBruBjxIri@D&e#20}RcTo3C!#7!O=RTio&)U7(!`i2j zdP%!jXu(LhZg#tpY!MM0#Xgzw^@nCBrgrVTGdBN{0Mix5|Bl 
zUDv9mlEye`)B$*%ZrI9lckDv2OgE1d@fN-3f2gUb5zR395R~zuBs*;ALPR%I*!n5j51`cO zY>qFFQHJG^-uExInO&f<*gJ<7h8?%ThVR|OMUrtJY_dUkKy%kI(^mugb}8fFSp_!j z8@b8Oq!C8#Qyu2tR5zL}pCbwHg4zl!8k3QY9pKql{BO4aI$bE>EE9=!bUiSBR&@z? zz*gmxqKwS0sDQrFN7}q)`El-k8E4%#g}d^X>($sNf1XCTUf4A_uNCSUIk(aD))8>% zzViPh%8Ai1Z0|Amye^&4T$0_}cg=JUN-G-SN(wsr=8^msD;%DIV>K;&Z{@2)Q5*z% z28&=1?&br#xDkC8U9XtRIhy%ifg6PgMK1z++c&?Q?xe}ebjtnOk15bJ?lJtpom8ls zd+nN0nTE6NT#(;|=_64K)rME5!Ue3v=EdOjv*g2iFeGZJw2wI%$&-YHo;Stry)3f` znIv&j_1X#!f5gPkeW2jBC?Vs|O#gm)#$h&~9`la!tuEjz3t<@n+ zn?f$k1~E5z=C;@D;IC^Y8`8x5X@;bb4OWEqtON-aJ(kJg(v8qi)zu z!1TOIfv~n+TMijI&VDJlY`VzF{GG+H1kfTqw-#a^=t$XROipXf)#SOyna^BciC!NQ zN(L*L67y;2v`&WBEw$gh_M(9ofXcBOneneu9;%oOOHe7o4 zdiNSknIf40TQqFm&GGnkQ+u6JWJ$4k&SETZlP>iuKrW`|3RjOw%l^{@9_z>1F9ueI~Vko6SPLW+4ogDOA0Y6wceu1tYS-v)?O*=;*RZHb_fRs+6WpY-)o)e`Xb53XvwKe%&VL zkO8Ng+&Tcz7u`j_qgoh}l&ExfhS}$QcF7@Q_^U(oZ!B?d`zWMln-KX7I1(?)Gm8zA z$y;{+2GW@t&%O~3{x^;^szmRJ|9ref~Z_$CR9o$aBp>BeoYLRwVXd&o)P(<5dvCDP zwZFJIreEsi_a!WYdb|fw9u~(`u&SN+8aigkYx>99?ndfyRAedzdiJO`VCOS zv6Q(xgIu;pw%50Mj<5$l{0>vXpt54e!6fkzrIdA``C+ija2({|K)Lwv>H#nAQ*W$T zaZ=0P%{%)?Cn+7;RWjJoV{p1VA*ew1fRVxM2Bc$jpG`}f}g$Q>Jl&L=w7nrC(V1h36 z7<7Pwf;1qb|ZAYC_F;wWIp?%*DNZ)2rnZ`nIj zVi`soKvfLLRRsRZp53C?on)ouHzCN0h7-J%3elFr)B<;e0*=$yFU zyeuH$v%0furh4kGgu1q;)bY@iY`2-Q*i0GE9Z9}-&6CpcQO7eW^YqlX`wIO;@~~l&BUgPg;^lo zMEP8(&?ktj5GoS@5o>UGvj3>+{U}N@C~qE6g8neY*-o}brf3Dq_=PRhFcAxW`r!g4 zf+Vs5=Ck@9rq?s|BLsztrkN-su^vh>_xFQSW%feh@7T`0rv8MG8owYH3tf``F=deN z%wA1+4aSN+Q&>JnR%E94ptM7(L_7Iy24)d3Kym6HD&%1>np1{hNOQBL36wwzg}L$0 z3y8X|OpTd0rKrG@+3PjJ+iuRI1RMLBzp(wl)mjixyLYkkB1j(COR{8^Bia% zuMSW2LF1Br@IH8(m0yTZ4YL$cs&UhbnKP9zYE@r50%#?OA#mJ+a%R5WYM@$PEiCRaCBrWxQI|Q zpFy60&KQ%2AG7lIWc$yB6j)@7AINiqDpic;0{lWd;;7N&OPO4P2c;Ag)MbT0&~Oxc z*$M2=H(n#(e}xBr{y#!}H<4{uKzINQn2vIHq+DmOQWT@Td6SRI4oEry&prUZZy%+! 
zG_|--pZrx*v-Yy;*inG?6dYQd@V!2@x;Ty6d2g-uSMQ*9<_(9N&Ou)@vcMNkJerr` zNH~iF9evsM*0+Y87rkb41}hL%h$c4`3^~a%G4d{X^7^F7_vr}-5c}=+W~-}qiisYI z(3N}yDOY+zb29=PKqH0osO{tdS-xOhhQRBx_j9j}m~ebBT5d_N*qEFO{rABVFJlQo z#I^jOOh`ghnd{hY=+K$5aO_{JsCM1j8XWfAX~sF5rbE+CrYa&*S2r?JS4T%zS4RVV z&}U$0-fnLr!F#J8G!9s(sNOOL+sgZjwzkmFwzjC<7&l&U*^NFD0bM;vA{gp=EiV}A zYM@hR@scEHde!O51uZY@qFeCL4KTm)-n%8{dQkPo z$r(dU%Yl^eSl?MY#THy$O_{n!LaZERq%F(M2MhGivpFOK42 zb!6&4-hj5NI8PJuMdwPlt2hzKq5IVXYbjn7+zB|xpthEBiP8$ zecJ`an*^$!4xfsM7?~B=5!j857Ik;Bys|q`;p_tVh(~tLzN+i2C2xyViOl3p{*PP5 zWJkL6?)u7f9d*DkE9+pjx1-~u-|e79GdC@Lz#jK*;Mr%k3qJtqYw2ENB-#YyA-a2> zV79#jyYM`Rfzv2|Q%ODFH#=h)C)xLOYMwX}$g z5Nc6>ha#8a&fzO&wWZC;iU1UIJDZ1?DI&{OM!Q-C1IhRYZd_3t%x z@z~0<)*I-T;w}WtH)h2$+sABdSocg!YNE;eJ=G@;NFRV^k17>+Yi6n|zCNAS*eXuQ z&1Jj=$s~(F-GDG$?#!}${P^+IIuSInW*aV;jB{R{%WIQ(Rj&=*ROUJP#U4^4uvGXS z=e%-~^;6W{qGCP3CqpO@61o}!d%`ke@XzixSsNKux;ohB?N)?^DTb)2rJ0Pjf@5Id z&2a~ZyqiulMeidmJNk)41C)mZDU<2kxpQtyV}t@ubq(}Bjc0VwP4AP9JD-2$m%7Y$ z$0Xt2JDDIb(b4Ks0u`Zb0dqwSZ>Q~z0;gW|p22oDo7(oz~mm zJ3BKpH8r)_g%2qU(RaJR;Rq<|g?>}TgxAn1{S0YP#D&9nehy7`6 zLbcm-FDa5ioxFJ|Gc#xgi&HpIVQ*X8;}9e5uj=w0mQaBTSuMz$;A?{nA6e z4rIWu&NT&o5m3i4YRkHp9fNN2ID)JoP27ni?6~%Ijf5HK#vnyRFTPcta6z#_Q{w+M>II>mx&6V9cDIl%(28wc%1`{NqJ=yhhhICN5{S0wzl;GA}phoV{OS{VR7pj zUDA>3N&-04YT2Ah?0!;hipxnARnPU`om!;P(`)+P((?8#fl!kOsrc1$E?e`2NvGAF z^n~1ToS9@^@p{3oKTluPb*myGF)t6`Zn!I)((4bO<#*6}Z!L274tQ)jIxN?RA@T7N zSG_Rd4%yDS?drNl5(~?KZ9&T0MB+zG3_c!Zi_`t&i9d!RwLs#%=Gb`xBMMjx?YDDM)TZKbcnY?H6?F(xhJ<7 zuPeIGzWCZTkKK(QcSzqTn@mWxVrPDVP%+VAdWCo*?`?i`M(C@7GEfW@Ck{`~aQTZC z?5F)kD+?~A>SMW($euf~%Ui3|rxyEr8kE>87t~k=RCgSC7w5`Eq|h_?g#0mqZYFdV z%~Xl?IL4i>reX9}t?oI+4GRPBpl0Od9eZpS8y`nl1;nu>Tk~!A*_BNiLJf1E#Sh1G)cO27FBgS8x?iM5qjxl z%MfIEhNUq8swZMP^ftTq{SaypHXP(+((w3<6!g!;@q(ODUz)DAv5q4F)abvlo zgH*4ZjkKHy1H`oF)E@=XnN)i%PmpHq6R?*g!H)mrfcLJ zjNH*aJsYwX7iFTqJ)gQRu1-91cDT%-S{SlrVUHHnWes89)skW^>d>%t_1M|4vXwK4 z=r2sXZJejW3rPLo>;EAhZ%jI2?|!KLvuyuWb5AGQ2X|}^p1&%4QW0RyK+muStS8Uu 
z*OqBHW&3xej0~rFDr9sb^RXGvx{%j#t{Sm7bhd4`&zgOR(0iRU02WBz!=`5ecS{;D zSl9F3I8S=cUM!vS=1MJFf)u9A_4khn%K`6pwj-X`OL%ONGy9v(o^CsIqP|}eqJC&Q?@@PXe&Wy>{`Gx_#E-Ky}Jk< zZvm@{gv(b}m6~@I92}tI8wZC)|B{nw<`;W+p>B2QxcKW*w)W4=dDhr2K6!nliz9x7 zC1WHzBfnpF*r>?v_IVcm@qt=j-Vqm_=uP_mK>B2y-d+&1sfYcj!mO@oc8Yy5 z;{!E&>XoG3(gx=`+tz{HC!{@nQTO~=&kN&a!6$?OK=wJSz( zF=uoy9|;IAJm2`?l}Tj;;v}s(ef=q^I)wNy58*2dSe{%H(kPQC z!kielu6{5`w|!_B6di+PBUn2>|Hat97v9`99lXwk(b?Krv@YT{+$mPJzy5%L5~CT= z3Tf)Qshh3z?cBZF2hu_twCk^C9-!5>WYLLK|D`AIj%*9}D<~x5d2+M!KSr3e8TLFA z2;;73E^`1Iy?Bowf(~Ck=ZYmxPl&7SmhYLeF7+7d8qKnA44%BvFD+isAnsxlGSrNO z5P&j@H9-8A$AMs)yQx9U>|3)d5;;+Ay|=9VWIkiskP@~eKe;ZEY@5N6ksQN-0AdF^ zUh}NT#Rv4^HC%}6C4VELiPXhffj>42WfuTH7OKK95K zTbn9mpZ6~Ray@}3m(F=X!sY3t+JR4TTd1!Mc%Y^T1!sjS!Jct}jcgaXyjth-M2w=n z2YbH=9?o?zJESqWua?_^Ha$&h`|DuX>L==hx3*FS2AWJlQB}07!*{!ZGqs}4UB%; zC1Pk}?9nK4L83yfDZt-7*J~xyMvqWFa-y0;@g%F*vy_gIyOUb=SpWX? zG!wr0kN6^^%k+U~v&Cnh#BS$iXXoZ-J1$PO@-3B(^j$`ELU<&Mb=|nAQMvw2d|$2Y zm5@A?<0xZZ`D~Bf5a^LeM2WGU_1YLu*j*Wq3_^-yAI$G|HZ9bAbW}6))4uW!6b@JD zR9zIuT<-!miw)8(UQkJ;4$>q!VXFD-Hb(~w#9vsVZEix{q%!6xmh9pF`qAAT>3AaT z1LYd_+1U^_G`e=!F!~HbjHaSWS%JInw34=^-}6n`wbDJnZ6O81WcI}FocR-6!N}JK zGe9ED1V_85z$rfVg8Y6Eh_9|LJWGR`A+@VLL639Oo`yoMENqJtnF@-E3JpT`v?@>J zr7`ZoAw5=c)xyieRxDsR?Xa*9hhGkvon>a8ouyg68Js&K?{;R-3dwv$p-y`|VzAzJ zgsZvPxD<5Vny$_KaW>UUP}u3uFZ|ApfIK_zCT5NlOhne$=wO1P;W!P)_r-LTpoOV) zXS{8T24v|pkRchWGrl2v0fq)5%m)hd-`F;VWIteaC2K zmj~rsOt9d0U4z1(eT`5~FV*|dsL|5m;=H_Koy)=c`ufJk`cIsG+&2#mvUB+Lt*-9H z{)~{rg^CBvgYesNZ+_h8E53z()(G3se5U8mS9yq%$Y|IFKJt30Yk^B5bh9vPCOO@= zLj4%6ZEYVvw#{vj)IDXJm>`#FczNbc_qcc3&66Xk^I!;6V=`nrTE@54+--AvV|p6Y zSDcr$PulFp50=2@w~!DL-_lg0g3dP-t^mS$rLwbq!8CLacQeY3cO`R%W&?K9n$?CN ziTd3B)7S|Czdp8rhZAzq%y7Ts#6cI9vM@J!X})$Gw_kXW<7eZggc2w`4Ja|lKJWXY zRkg^%X098;!a7+&K>!@XH0wjsqRI(_fKvXa@lc7x25>#UWMcf9rlxjxl9MwMXdMmh zj7iY%sK{6w<1KiyWavKVT3?6id*?BD?iUD~-*KA+S$D0n8HzuOY0tDo@|ncO4Z`lL zUv;_(jr5oJlxi3)$>{0Nf}M>WRkDzLX2OAfb)e50`vAi#XJkSybAP{?(v*lyYRkiL1o zwwb;rbHmPw+~{)p2(e|bkC-)(pRn)#Ax#pG=d-0(r!tKh?#qYk8ldx_ 
z=fAH2kJAA5p!V+F4Vt_nERGhCJFP!HJaO6wr#`bX9U?8|DYlouP^SMEEy~EyOixQ& zU0^*wR3Sogn`YD0Wb?Xpvujw0tv+6f}HEk%Z4FCs&SQ54}$zL&eAgV=BZ62miIOE!=v)^GRiS26xxygLGVV!~JJx*QR%G$8e&4cEKb!9DQB;xm z&LuiLki($lP~cHJ`oWNu}uX?DP)Wbu%?>V)|*k zE?{aKL7tNyZjn@~c@%_izm;qzyV{bILhLFmUR@{l^|dUBJ}X>z5BCZ5Fe-aC-5$_-FmYzMevc<8OSrjn(%eC8M$+WPn0tm!ImF**H;$na*e-i{YT;yn=*Z zu-m_l>3Ct;;zl$G?kR+slhN0(0S=`O{il1XKPZ0d@taKoLwA|CV!3r4(tE7>#mbeZ zaO&v|_{+A0=>iM`x-_nS5<`N0@_QHv-?N+;R8_I7^0t60A1oN|=L zs|z0+P=uu~k291_5e^;vh-IO}w-Q@?N<-7%?hZBy! zotQ)$$0XoI+rH$By2O^m#pmnGzg8q7vpK!G=Z~3ZH5mq3dqAM5n#pZ>`-3-wldhXq zxsjH}XV#dcJ6x*ljpeclambG$Ce;xEFA#)}HMbJ_#lH*WR*hsiXJ1NMRg>u0Ej`u} zCk_hE%kn{*4cQZ)cgtQctKE87X{8;_!ZcztJ$AO<1T!)dq7iPWAEBE zo_w}<%&eh6=+Q~c*7S=iFwvL?<&wHpsh-lZia48tnWoi zuwFZOeAfU?^j*Ri_KvWmf4>d2XJV|{t7PzvKBE;G~StqGr3u)SqOT!Q^|`O~ph z<&UxpV6Sj)Ab>y1-ib-^TCmB0ItzY5C2_6_csNk}$-w4XI=o`JCz!?nSJ;eSD32v5Qn>+;0F)gk#)s1-{OB%YsH6 zKFMY$PVb(CQe}v)={?(#k~bsXO#Tie7+F!phyihX%x}*13@>cXOvdfI%%eX$H}x)^3A?qA#3rqHonLh0`{A%KNJj++1p%{cbW~fT(s#ev@y4iC zEnzxAv+N@U**)X@xSv)Y_5y;r{m5g^EkY)Kjk{6V4r|DEbVL4~7u2Q_AB}pgP zX;km4vB9ZVndw1T#SCwhU$XN^ObSp z-iTC7fVM7D5w~PUC%bI=^wQ=|EQavp#w^AM^(5exo{<6W-sszE%+4?bn9}BwRa=NT@};~kp4}Ts8y|o3Jkm;{CS9Qs6pU%rpTnK7#GQAD^@?#W>6>6}g%4fH?S-m|SCLMyyaI&XN>4sx-F!M260sE3fZV024A! 
z02(&crcuoSEYk~dWl46)Kxw-H#&hNN2yqcYubwHNZK;vdDH;(u$1}6jgTEevzgBx& zagz+DP}o*ZkfCwW#Cs(C5`fx`tE(WD*&2ro?Z1YEbk+g|l4hr#uF;B`Z~o+NXA3zM zKw|M;(Qahen*5PU8qVK z-QK>lrL0UbD^2*w_s2Yf<9bx@lVJ-?L?l6S%l-pn=ty7RXsxxi_DgJ3pUe86twlB| z2`Q7TppXzU`b;W`jj4DFvP4PCV50>)DJ8AL?&)O9tHTh95<2%fUx}9;%`F@zd@vmj zzEd(G5-$t9u^+Db{o8YErn*1()*0vuKHuS-hhNkSLm1Dkg~3j8Bmq#U`lz zN&ZylOGYNSp2ONVg`78G2SEo4iXfN-DYyR%TY~ZeysUbV)w@nTNIT<=<_bIGdzc8@ z|DfEXffpwE=kNddK=qFd08I1GJorZjU`P089{eK%NHP2~5B?X)U<|p}y2sp?;70T0 z-?Az%NUOiw-D~aW47e)`!Jm}FeC*#+81{A+hgiERYmg@g>f`f+QoeTG5WiQ-+W@q`t x#)9_5zutnAX`z#!np=wLSUhupPP{Ltz$$Kav0d literal 0 HcmV?d00001 diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 000000000..32d46ee88 --- /dev/null +++ b/docs/index.md @@ -0,0 +1 @@ +../README.md \ No newline at end of file diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 000000000..39fab4e1f --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,4 @@ +mkdocs-material +swagger-markdown +mkdocs-macros-plugin +ruamel.yaml diff --git a/mkdocs.yml b/mkdocs.yml new file mode 100644 index 000000000..c4b7ea550 --- /dev/null +++ b/mkdocs.yml @@ -0,0 +1,327 @@ +site_name: MLPerf Inference Documentation +repo_url: https://github.com/mlcommons/cm4mlops +theme: + name: material + logo: img/logo_v2.svg + favicon: img/logo_v2.svg + palette: + primary: deep purple + accent: green + features: + - content.tabs.link + - content.code.copy + - navigation.expand + - navigation.sections + - navigation.indexes + - navigation.instant + - navigation.tabs + - navigation.tabs.sticky + - navigation.top + - toc.follow +nav: + - CM Scripts: + - index.md + - Python automation: + - activate-python-venv: docs\Python-automation\activate-python-venv.md + - get-generic-python-lib: docs\Python-automation\get-generic-python-lib.md + - get-python3: docs\Python-automation\get-python3.md + - install-generic-conda-package: docs\Python-automation\install-generic-conda-package.md + - install-python-src: 
docs\Python-automation\install-python-src.md + - install-python-venv: docs\Python-automation\install-python-venv.md + - MLPerf benchmark support: + - add-custom-nvidia-system: docs\MLPerf-benchmark-support\add-custom-nvidia-system.md + - benchmark-any-mlperf-inference-implementation: docs\MLPerf-benchmark-support\benchmark-any-mlperf-inference-implementation.md + - build-mlperf-inference-server-nvidia: docs\MLPerf-benchmark-support\build-mlperf-inference-server-nvidia.md + - generate-mlperf-inference-submission: docs\MLPerf-benchmark-support\generate-mlperf-inference-submission.md + - generate-mlperf-inference-user-conf: docs\MLPerf-benchmark-support\generate-mlperf-inference-user-conf.md + - generate-mlperf-tiny-report: docs\MLPerf-benchmark-support\generate-mlperf-tiny-report.md + - generate-mlperf-tiny-submission: docs\MLPerf-benchmark-support\generate-mlperf-tiny-submission.md + - generate-nvidia-engine: docs\MLPerf-benchmark-support\generate-nvidia-engine.md + - get-mlperf-inference-intel-scratch-space: docs\MLPerf-benchmark-support\get-mlperf-inference-intel-scratch-space.md + - get-mlperf-inference-loadgen: docs\MLPerf-benchmark-support\get-mlperf-inference-loadgen.md + - get-mlperf-inference-nvidia-common-code: docs\MLPerf-benchmark-support\get-mlperf-inference-nvidia-common-code.md + - get-mlperf-inference-nvidia-scratch-space: docs\MLPerf-benchmark-support\get-mlperf-inference-nvidia-scratch-space.md + - get-mlperf-inference-results: docs\MLPerf-benchmark-support\get-mlperf-inference-results.md + - get-mlperf-inference-results-dir: docs\MLPerf-benchmark-support\get-mlperf-inference-results-dir.md + - get-mlperf-inference-src: docs\MLPerf-benchmark-support\get-mlperf-inference-src.md + - get-mlperf-inference-submission-dir: docs\MLPerf-benchmark-support\get-mlperf-inference-submission-dir.md + - get-mlperf-inference-sut-configs: docs\MLPerf-benchmark-support\get-mlperf-inference-sut-configs.md + - get-mlperf-inference-sut-description: 
docs\MLPerf-benchmark-support\get-mlperf-inference-sut-description.md + - get-mlperf-logging: docs\MLPerf-benchmark-support\get-mlperf-logging.md + - get-mlperf-power-dev: docs\MLPerf-benchmark-support\get-mlperf-power-dev.md + - get-mlperf-tiny-eembc-energy-runner-src: docs\MLPerf-benchmark-support\get-mlperf-tiny-eembc-energy-runner-src.md + - get-mlperf-tiny-src: docs\MLPerf-benchmark-support\get-mlperf-tiny-src.md + - get-mlperf-training-nvidia-code: docs\MLPerf-benchmark-support\get-mlperf-training-nvidia-code.md + - get-mlperf-training-src: docs\MLPerf-benchmark-support\get-mlperf-training-src.md + - get-nvidia-mitten: docs\MLPerf-benchmark-support\get-nvidia-mitten.md + - get-spec-ptd: docs\MLPerf-benchmark-support\get-spec-ptd.md + - import-mlperf-inference-to-experiment: docs\MLPerf-benchmark-support\import-mlperf-inference-to-experiment.md + - import-mlperf-tiny-to-experiment: docs\MLPerf-benchmark-support\import-mlperf-tiny-to-experiment.md + - import-mlperf-training-to-experiment: docs\MLPerf-benchmark-support\import-mlperf-training-to-experiment.md + - install-mlperf-logging-from-src: docs\MLPerf-benchmark-support\install-mlperf-logging-from-src.md + - prepare-training-data-bert: docs\MLPerf-benchmark-support\prepare-training-data-bert.md + - prepare-training-data-resnet: docs\MLPerf-benchmark-support\prepare-training-data-resnet.md + - preprocess-mlperf-inference-submission: docs\MLPerf-benchmark-support\preprocess-mlperf-inference-submission.md + - process-mlperf-accuracy: docs\MLPerf-benchmark-support\process-mlperf-accuracy.md + - push-mlperf-inference-results-to-github: docs\MLPerf-benchmark-support\push-mlperf-inference-results-to-github.md + - run-all-mlperf-models: docs\MLPerf-benchmark-support\run-all-mlperf-models.md + - run-mlperf-inference-mobilenet-models: docs\MLPerf-benchmark-support\run-mlperf-inference-mobilenet-models.md + - run-mlperf-inference-submission-checker: 
docs\MLPerf-benchmark-support\run-mlperf-inference-submission-checker.md + - run-mlperf-power-client: docs\MLPerf-benchmark-support\run-mlperf-power-client.md + - run-mlperf-power-server: docs\MLPerf-benchmark-support\run-mlperf-power-server.md + - run-mlperf-training-submission-checker: docs\MLPerf-benchmark-support\run-mlperf-training-submission-checker.md + - truncate-mlperf-inference-accuracy-log: docs\MLPerf-benchmark-support\truncate-mlperf-inference-accuracy-log.md + - Modular AI-ML application pipeline: + - app-image-classification-onnx-py: docs\Modular-AI/ML-application-pipeline\app-image-classification-onnx-py.md + - app-image-classification-tf-onnx-cpp: docs\Modular-AI/ML-application-pipeline\app-image-classification-tf-onnx-cpp.md + - app-image-classification-torch-py: docs\Modular-AI/ML-application-pipeline\app-image-classification-torch-py.md + - app-image-classification-tvm-onnx-py: docs\Modular-AI/ML-application-pipeline\app-image-classification-tvm-onnx-py.md + - app-stable-diffusion-onnx-py: docs\Modular-AI/ML-application-pipeline\app-stable-diffusion-onnx-py.md + - Modular application pipeline: + - app-image-corner-detection: docs\Modular-application-pipeline\app-image-corner-detection.md + - Modular MLPerf inference benchmark pipeline: + - app-loadgen-generic-python: docs\Modular-MLPerf-inference-benchmark-pipeline\app-loadgen-generic-python.md + - app-mlperf-inference: docs\Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference.md + - app-mlperf-inference-ctuning-cpp-tflite: docs\Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference-ctuning-cpp-tflite.md + - app-mlperf-inference-mlcommons-cpp: docs\Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference-mlcommons-cpp.md + - app-mlperf-inference-mlcommons-python: docs\Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference-mlcommons-python.md + - benchmark-program-mlperf: docs\Modular-MLPerf-inference-benchmark-pipeline\benchmark-program-mlperf.md + - 
run-mlperf-inference-app: docs\Modular-MLPerf-inference-benchmark-pipeline\run-mlperf-inference-app.md + - Modular MLPerf benchmarks: + - app-mlperf-inference-dummy: docs\Modular-MLPerf-benchmarks\app-mlperf-inference-dummy.md + - app-mlperf-inference-intel: docs\Modular-MLPerf-benchmarks\app-mlperf-inference-intel.md + - app-mlperf-inference-qualcomm: docs\Modular-MLPerf-benchmarks\app-mlperf-inference-qualcomm.md + - Reproduce MLPerf benchmarks: + - app-mlperf-inference-nvidia: docs\Reproduce-MLPerf-benchmarks\app-mlperf-inference-nvidia.md + - reproduce-mlperf-octoml-tinyml-results: docs\Reproduce-MLPerf-benchmarks\reproduce-mlperf-octoml-tinyml-results.md + - reproduce-mlperf-training-nvidia: docs\Reproduce-MLPerf-benchmarks\reproduce-mlperf-training-nvidia.md + - wrapper-reproduce-octoml-tinyml-submission: docs\Reproduce-MLPerf-benchmarks\wrapper-reproduce-octoml-tinyml-submission.md + - Modular MLPerf training benchmark pipeline: + - app-mlperf-training-nvidia: docs\Modular-MLPerf-training-benchmark-pipeline\app-mlperf-training-nvidia.md + - app-mlperf-training-reference: docs\Modular-MLPerf-training-benchmark-pipeline\app-mlperf-training-reference.md + - DevOps automation: + - benchmark-program: docs\DevOps-automation\benchmark-program.md + - compile-program: docs\DevOps-automation\compile-program.md + - convert-csv-to-md: docs\DevOps-automation\convert-csv-to-md.md + - copy-to-clipboard: docs\DevOps-automation\copy-to-clipboard.md + - create-conda-env: docs\DevOps-automation\create-conda-env.md + - create-patch: docs\DevOps-automation\create-patch.md + - detect-sudo: docs\DevOps-automation\detect-sudo.md + - download-and-extract: docs\DevOps-automation\download-and-extract.md + - download-file: docs\DevOps-automation\download-file.md + - download-torrent: docs\DevOps-automation\download-torrent.md + - extract-file: docs\DevOps-automation\extract-file.md + - fail: docs\DevOps-automation\fail.md + - get-conda: docs\DevOps-automation\get-conda.md + - 
get-git-repo: docs\DevOps-automation\get-git-repo.md + - get-github-cli: docs\DevOps-automation\get-github-cli.md + - pull-git-repo: docs\DevOps-automation\pull-git-repo.md + - push-csv-to-spreadsheet: docs\DevOps-automation\push-csv-to-spreadsheet.md + - set-device-settings-qaic: docs\DevOps-automation\set-device-settings-qaic.md + - set-echo-off-win: docs\DevOps-automation\set-echo-off-win.md + - set-performance-mode: docs\DevOps-automation\set-performance-mode.md + - set-sqlite-dir: docs\DevOps-automation\set-sqlite-dir.md + - tar-my-folder: docs\DevOps-automation\tar-my-folder.md + - Docker automation: + - build-docker-image: docs\Docker-automation\build-docker-image.md + - build-dockerfile: docs\Docker-automation\build-dockerfile.md + - prune-docker: docs\Docker-automation\prune-docker.md + - run-docker-container: docs\Docker-automation\run-docker-container.md + - AI-ML optimization: + - calibrate-model-for.qaic: docs\AI/ML-optimization\calibrate-model-for.qaic.md + - compile-model-for.qaic: docs\AI/ML-optimization\compile-model-for.qaic.md + - prune-bert-models: docs\AI/ML-optimization\prune-bert-models.md + - AI-ML models: + - convert-ml-model-huggingface-to-onnx: docs\AI/ML-models\convert-ml-model-huggingface-to-onnx.md + - get-bert-squad-vocab: docs\AI/ML-models\get-bert-squad-vocab.md + - get-dlrm: docs\AI/ML-models\get-dlrm.md + - get-ml-model-3d-unet-kits19: docs\AI/ML-models\get-ml-model-3d-unet-kits19.md + - get-ml-model-bert-base-squad: docs\AI/ML-models\get-ml-model-bert-base-squad.md + - get-ml-model-bert-large-squad: docs\AI/ML-models\get-ml-model-bert-large-squad.md + - get-ml-model-dlrm-terabyte: docs\AI/ML-models\get-ml-model-dlrm-terabyte.md + - get-ml-model-efficientnet-lite: docs\AI/ML-models\get-ml-model-efficientnet-lite.md + - get-ml-model-gptj: docs\AI/ML-models\get-ml-model-gptj.md + - get-ml-model-huggingface-zoo: docs\AI/ML-models\get-ml-model-huggingface-zoo.md + - get-ml-model-llama2: docs\AI/ML-models\get-ml-model-llama2.md + - 
get-ml-model-mobilenet: docs\AI/ML-models\get-ml-model-mobilenet.md + - get-ml-model-neuralmagic-zoo: docs\AI/ML-models\get-ml-model-neuralmagic-zoo.md + - get-ml-model-resnet50: docs\AI/ML-models\get-ml-model-resnet50.md + - get-ml-model-retinanet: docs\AI/ML-models\get-ml-model-retinanet.md + - get-ml-model-retinanet-nvidia: docs\AI/ML-models\get-ml-model-retinanet-nvidia.md + - get-ml-model-rnnt: docs\AI/ML-models\get-ml-model-rnnt.md + - get-ml-model-stable-diffusion: docs\AI/ML-models\get-ml-model-stable-diffusion.md + - get-ml-model-tiny-resnet: docs\AI/ML-models\get-ml-model-tiny-resnet.md + - get-ml-model-using-imagenet-from-model-zoo: docs\AI/ML-models\get-ml-model-using-imagenet-from-model-zoo.md + - get-tvm-model: docs\AI/ML-models\get-tvm-model.md + - CM automation: + - create-custom-cache-entry: docs\CM-automation\create-custom-cache-entry.md + - TinyML automation: + - create-fpgaconvnet-app-tinyml: docs\TinyML-automation\create-fpgaconvnet-app-tinyml.md + - create-fpgaconvnet-config-tinyml: docs\TinyML-automation\create-fpgaconvnet-config-tinyml.md + - flash-tinyml-binary: docs\TinyML-automation\flash-tinyml-binary.md + - get-microtvm: docs\TinyML-automation\get-microtvm.md + - get-zephyr: docs\TinyML-automation\get-zephyr.md + - get-zephyr-sdk: docs\TinyML-automation\get-zephyr-sdk.md + - Cloud automation: + - destroy-terraform: docs\Cloud-automation\destroy-terraform.md + - get-aws-cli: docs\Cloud-automation\get-aws-cli.md + - get-terraform: docs\Cloud-automation\get-terraform.md + - install-aws-cli: docs\Cloud-automation\install-aws-cli.md + - install-terraform-from-src: docs\Cloud-automation\install-terraform-from-src.md + - run-terraform: docs\Cloud-automation\run-terraform.md + - Platform information: + - detect-cpu: docs\Platform-information\detect-cpu.md + - detect-os: docs\Platform-information\detect-os.md + - Detection or installation of tools and artifacts: + - get-android-sdk: 
docs\Detection-or-installation-of-tools-and-artifacts\get-android-sdk.md + - get-aria2: docs\Detection-or-installation-of-tools-and-artifacts\get-aria2.md + - get-bazel: docs\Detection-or-installation-of-tools-and-artifacts\get-bazel.md + - get-blis: docs\Detection-or-installation-of-tools-and-artifacts\get-blis.md + - get-brew: docs\Detection-or-installation-of-tools-and-artifacts\get-brew.md + - get-cmake: docs\Detection-or-installation-of-tools-and-artifacts\get-cmake.md + - get-cmsis_5: docs\Detection-or-installation-of-tools-and-artifacts\get-cmsis_5.md + - get-docker: docs\Detection-or-installation-of-tools-and-artifacts\get-docker.md + - get-generic-sys-util: docs\Detection-or-installation-of-tools-and-artifacts\get-generic-sys-util.md + - get-google-test: docs\Detection-or-installation-of-tools-and-artifacts\get-google-test.md + - get-java: docs\Detection-or-installation-of-tools-and-artifacts\get-java.md + - get-javac: docs\Detection-or-installation-of-tools-and-artifacts\get-javac.md + - get-lib-armnn: docs\Detection-or-installation-of-tools-and-artifacts\get-lib-armnn.md + - get-lib-dnnl: docs\Detection-or-installation-of-tools-and-artifacts\get-lib-dnnl.md + - get-lib-protobuf: docs\Detection-or-installation-of-tools-and-artifacts\get-lib-protobuf.md + - get-lib-qaic-api: docs\Detection-or-installation-of-tools-and-artifacts\get-lib-qaic-api.md + - get-nvidia-docker: docs\Detection-or-installation-of-tools-and-artifacts\get-nvidia-docker.md + - get-openssl: docs\Detection-or-installation-of-tools-and-artifacts\get-openssl.md + - get-rclone: docs\Detection-or-installation-of-tools-and-artifacts\get-rclone.md + - get-sys-utils-cm: docs\Detection-or-installation-of-tools-and-artifacts\get-sys-utils-cm.md + - get-sys-utils-min: docs\Detection-or-installation-of-tools-and-artifacts\get-sys-utils-min.md + - get-xilinx-sdk: docs\Detection-or-installation-of-tools-and-artifacts\get-xilinx-sdk.md + - get-zendnn: 
docs\Detection-or-installation-of-tools-and-artifacts\get-zendnn.md + - install-bazel: docs\Detection-or-installation-of-tools-and-artifacts\install-bazel.md + - install-cmake-prebuilt: docs\Detection-or-installation-of-tools-and-artifacts\install-cmake-prebuilt.md + - install-gflags: docs\Detection-or-installation-of-tools-and-artifacts\install-gflags.md + - install-github-cli: docs\Detection-or-installation-of-tools-and-artifacts\install-github-cli.md + - install-numactl-from-src: docs\Detection-or-installation-of-tools-and-artifacts\install-numactl-from-src.md + - install-openssl: docs\Detection-or-installation-of-tools-and-artifacts\install-openssl.md + - Compiler automation: + - get-aocl: docs\Compiler-automation\get-aocl.md + - get-cl: docs\Compiler-automation\get-cl.md + - get-compiler-flags: docs\Compiler-automation\get-compiler-flags.md + - get-compiler-rust: docs\Compiler-automation\get-compiler-rust.md + - get-gcc: docs\Compiler-automation\get-gcc.md + - get-go: docs\Compiler-automation\get-go.md + - get-llvm: docs\Compiler-automation\get-llvm.md + - install-gcc-src: docs\Compiler-automation\install-gcc-src.md + - install-ipex-from-src: docs\Compiler-automation\install-ipex-from-src.md + - install-llvm-prebuilt: docs\Compiler-automation\install-llvm-prebuilt.md + - install-llvm-src: docs\Compiler-automation\install-llvm-src.md + - install-onednn-from-src: docs\Compiler-automation\install-onednn-from-src.md + - install-onnxruntime-from-src: docs\Compiler-automation\install-onnxruntime-from-src.md + - install-pytorch-from-src: docs\Compiler-automation\install-pytorch-from-src.md + - install-pytorch-kineto-from-src: docs\Compiler-automation\install-pytorch-kineto-from-src.md + - install-torchvision-from-src: docs\Compiler-automation\install-torchvision-from-src.md + - install-tpp-pytorch-extension: docs\Compiler-automation\install-tpp-pytorch-extension.md + - install-transformers-from-src: docs\Compiler-automation\install-transformers-from-src.md + - CM 
Interface: + - get-cache-dir: docs\CM-Interface\get-cache-dir.md + - Legacy CK support: + - get-ck: docs\Legacy-CK-support\get-ck.md + - get-ck-repo-mlops: docs\Legacy-CK-support\get-ck-repo-mlops.md + - AI-ML datasets: + - get-croissant: docs\AI/ML-datasets\get-croissant.md + - get-dataset-cifar10: docs\AI/ML-datasets\get-dataset-cifar10.md + - get-dataset-cnndm: docs\AI/ML-datasets\get-dataset-cnndm.md + - get-dataset-coco: docs\AI/ML-datasets\get-dataset-coco.md + - get-dataset-coco2014: docs\AI/ML-datasets\get-dataset-coco2014.md + - get-dataset-criteo: docs\AI/ML-datasets\get-dataset-criteo.md + - get-dataset-imagenet-aux: docs\AI/ML-datasets\get-dataset-imagenet-aux.md + - get-dataset-imagenet-calibration: docs\AI/ML-datasets\get-dataset-imagenet-calibration.md + - get-dataset-imagenet-helper: docs\AI/ML-datasets\get-dataset-imagenet-helper.md + - get-dataset-imagenet-train: docs\AI/ML-datasets\get-dataset-imagenet-train.md + - get-dataset-imagenet-val: docs\AI/ML-datasets\get-dataset-imagenet-val.md + - get-dataset-kits19: docs\AI/ML-datasets\get-dataset-kits19.md + - get-dataset-librispeech: docs\AI/ML-datasets\get-dataset-librispeech.md + - get-dataset-openimages: docs\AI/ML-datasets\get-dataset-openimages.md + - get-dataset-openimages-annotations: docs\AI/ML-datasets\get-dataset-openimages-annotations.md + - get-dataset-openimages-calibration: docs\AI/ML-datasets\get-dataset-openimages-calibration.md + - get-dataset-openorca: docs\AI/ML-datasets\get-dataset-openorca.md + - get-dataset-squad: docs\AI/ML-datasets\get-dataset-squad.md + - get-dataset-squad-vocab: docs\AI/ML-datasets\get-dataset-squad-vocab.md + - get-preprocessed-dataset-criteo: docs\AI/ML-datasets\get-preprocessed-dataset-criteo.md + - get-preprocessed-dataset-generic: docs\AI/ML-datasets\get-preprocessed-dataset-generic.md + - get-preprocessed-dataset-imagenet: docs\AI/ML-datasets\get-preprocessed-dataset-imagenet.md + - get-preprocessed-dataset-kits19: 
docs\AI/ML-datasets\get-preprocessed-dataset-kits19.md + - get-preprocessed-dataset-librispeech: docs\AI/ML-datasets\get-preprocessed-dataset-librispeech.md + - get-preprocessed-dataset-openimages: docs\AI/ML-datasets\get-preprocessed-dataset-openimages.md + - get-preprocessed-dataset-openorca: docs\AI/ML-datasets\get-preprocessed-dataset-openorca.md + - get-preprocessed-dataset-squad: docs\AI/ML-datasets\get-preprocessed-dataset-squad.md + - CUDA automation: + - get-cuda: docs\CUDA-automation\get-cuda.md + - get-cuda-devices: docs\CUDA-automation\get-cuda-devices.md + - get-cudnn: docs\CUDA-automation\get-cudnn.md + - get-tensorrt: docs\CUDA-automation\get-tensorrt.md + - install-cuda-package-manager: docs\CUDA-automation\install-cuda-package-manager.md + - install-cuda-prebuilt: docs\CUDA-automation\install-cuda-prebuilt.md + - AI-ML frameworks: + - get-google-saxml: docs\AI/ML-frameworks\get-google-saxml.md + - get-onnxruntime-prebuilt: docs\AI/ML-frameworks\get-onnxruntime-prebuilt.md + - get-qaic-apps-sdk: docs\AI/ML-frameworks\get-qaic-apps-sdk.md + - get-qaic-platform-sdk: docs\AI/ML-frameworks\get-qaic-platform-sdk.md + - get-qaic-software-kit: docs\AI/ML-frameworks\get-qaic-software-kit.md + - get-rocm: docs\AI/ML-frameworks\get-rocm.md + - get-tvm: docs\AI/ML-frameworks\get-tvm.md + - install-qaic-compute-sdk-from-src: docs\AI/ML-frameworks\install-qaic-compute-sdk-from-src.md + - install-rocm: docs\AI/ML-frameworks\install-rocm.md + - install-tensorflow-for-c: docs\AI/ML-frameworks\install-tensorflow-for-c.md + - install-tensorflow-from-src: docs\AI/ML-frameworks\install-tensorflow-from-src.md + - install-tflite-from-src: docs\AI/ML-frameworks\install-tflite-from-src.md + - Reproducibility and artifact evaluation: + - get-ipol-src: docs\Reproducibility-and-artifact-evaluation\get-ipol-src.md + - process-ae-users: docs\Reproducibility-and-artifact-evaluation\process-ae-users.md + - reproduce-ipol-paper-2022-439: 
docs\Reproducibility-and-artifact-evaluation\reproduce-ipol-paper-2022-439.md + - reproduce-micro-paper-2023-victima: docs\Reproducibility-and-artifact-evaluation\reproduce-micro-paper-2023-victima.md + - GUI: + - gui: docs\GUI\gui.md + - Collective benchmarking: + - launch-benchmark: docs\Collective-benchmarking\launch-benchmark.md + - Tests: + - print-any-text: docs\Tests\print-any-text.md + - print-croissant-desc: docs\Tests\print-croissant-desc.md + - print-hello-world: docs\Tests\print-hello-world.md + - print-hello-world-java: docs\Tests\print-hello-world-java.md + - print-hello-world-javac: docs\Tests\print-hello-world-javac.md + - print-hello-world-py: docs\Tests\print-hello-world-py.md + - print-python-version: docs\Tests\print-python-version.md + - run-python: docs\Tests\run-python.md + - test-cm-core: docs\Tests\test-cm-core.md + - test-cm-script-pipeline: docs\Tests\test-cm-script-pipeline.md + - test-deps-conditions: docs\Tests\test-deps-conditions.md + - test-deps-conditions2: docs\Tests\test-deps-conditions2.md + - test-download-and-extract-artifacts: docs\Tests\test-download-and-extract-artifacts.md + - test-set-sys-user-cm: docs\Tests\test-set-sys-user-cm.md + - upgrade-python-pip: docs\Tests\upgrade-python-pip.md + - Dashboard automation: + - publish-results-to-dashboard: docs\Dashboard-automation\publish-results-to-dashboard.md + - Remote automation: + - remote-run-commands: docs\Remote-automation\remote-run-commands.md + - CM interface prototyping: + - test-debug: docs\CM-interface-prototyping\test-debug.md + - test-mlperf-inference-retinanet: docs\CM-interface-prototyping\test-mlperf-inference-retinanet.md + +markdown_extensions: + - pymdownx.tasklist: + custom_checkbox: true + - pymdownx.details + - admonition + - attr_list + - def_list + - footnotes + - pymdownx.superfences: + custom_fences: + - name: mermaid + class: mermaid + format: !!python/name:pymdownx.superfences.fence_code_format + - pymdownx.tabbed: + alternate_style: true +plugins: 
+ - search + - macros diff --git a/mkdocsHelper.py b/mkdocsHelper.py new file mode 100644 index 000000000..acaf95e2f --- /dev/null +++ b/mkdocsHelper.py @@ -0,0 +1,87 @@ +import os +import json +import yaml +import shutil + +def get_category_from_file(file_path): + try: + with open(file_path, 'r') as file: + if file_path.endswith('.json'): + data = json.load(file) + elif file_path.endswith('.yaml') or file_path.endswith('.yml'): + data = yaml.safe_load(file) + else: + return None + return data.get('category') + except Exception as e: + print(f"Error reading {file_path}: {e}") + return None + +def scan_folders(parent_folder): + category_dict = {} + parent_folder = os.path.join(parent_folder,"script") + for folder_name in os.listdir(parent_folder): + folder_path = os.path.join(parent_folder, folder_name) + if os.path.isdir(folder_path): + cm_file_path_json = os.path.join(folder_path, '_cm.json') + cm_file_path_yaml = os.path.join(folder_path, '_cm.yaml') + category = None + + if os.path.isfile(cm_file_path_json): + category = get_category_from_file(cm_file_path_json) + elif os.path.isfile(cm_file_path_yaml): + category = get_category_from_file(cm_file_path_yaml) + + if category: + if category not in category_dict: + category_dict[category] = [] + category_dict[category].append(folder_name) + + return category_dict + +def print_category_structure(category_dict): + # print(" - CM Scripts:") + for category, folders in category_dict.items(): + category_path = os.path.join("docs", category.replace("/", "-")) + # category_path_formated = category_path.replace("/", "-") + category_path_formated = category_path.replace(" ", "-") + if not os.path.exists(category_path_formated): + os.makedirs(category_path_formated) + # print(f" - {category}:") + for folder in folders: + folder_name = folder.replace("/", "-") + source_path_folder = os.path.join("script", folder_name) + source_file_path = os.path.join(source_path_folder, "README.md") + target_path = 
os.path.join(category_path_formated, folder_name + ".md") + if not os.path.exists(source_file_path): + # print(f"Source file does not exist: {source_file_path}") + continue + if not os.path.exists(os.path.dirname(target_path)): + os.makedirs(os.path.dirname(target_path)) + if os.path.exists(target_path): + # print(f"Target file already exists: {target_path}") + continue + try: + print(source_file_path) + print(target_path) + print(os.getcwd()) + shutil.copyfile(source_file_path, target_path) + # os.symlink(source_file_path, target_path) + # print(f" - {folder_name}:{target_path}") + except OSError as e: + print(f"Failed to copy file: {e}") + print(" - CM Scripts:") + for category, folders in category_dict.items(): + category_path = os.path.join("docs", category) + category_path_formated = category_path.replace("/", "-") + category_path_formated = category_path_formated.replace(" ", "-") + print(f" - {category.replace('/', '-')}:") + for folder in folders: + folder_name = folder.replace("/", "-") + target_path = os.path.join(category_path_formated, folder_name + ".md") + print(f" - {folder_name}: {target_path}") + +if __name__ == "__main__": + parent_folder = r"" # Replace with the actual path to the parent folder + category_dict = scan_folders(parent_folder) + print_category_structure(category_dict) From a9dcd7589433802b4f8b814d2d52190d6648cf4d Mon Sep 17 00:00:00 2001 From: ANANDHU S <71482562+anandhu-eng@users.noreply.github.com> Date: Fri, 24 May 2024 22:52:58 +0530 Subject: [PATCH 4/7] Fixed path issue --- mkdocs.yml | 510 ++++++++++++++++++++++++------------------------ mkdocsHelper.py | 6 +- 2 files changed, 258 insertions(+), 258 deletions(-) diff --git a/mkdocs.yml b/mkdocs.yml index c4b7ea550..5dedc979b 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -22,290 +22,290 @@ nav: - CM Scripts: - index.md - Python automation: - - activate-python-venv: docs\Python-automation\activate-python-venv.md - - get-generic-python-lib: 
docs\Python-automation\get-generic-python-lib.md - - get-python3: docs\Python-automation\get-python3.md - - install-generic-conda-package: docs\Python-automation\install-generic-conda-package.md - - install-python-src: docs\Python-automation\install-python-src.md - - install-python-venv: docs\Python-automation\install-python-venv.md + - activate-python-venv: Python-automation\activate-python-venv.md + - get-generic-python-lib: Python-automation\get-generic-python-lib.md + - get-python3: Python-automation\get-python3.md + - install-generic-conda-package: Python-automation\install-generic-conda-package.md + - install-python-src: Python-automation\install-python-src.md + - install-python-venv: Python-automation\install-python-venv.md - MLPerf benchmark support: - - add-custom-nvidia-system: docs\MLPerf-benchmark-support\add-custom-nvidia-system.md - - benchmark-any-mlperf-inference-implementation: docs\MLPerf-benchmark-support\benchmark-any-mlperf-inference-implementation.md - - build-mlperf-inference-server-nvidia: docs\MLPerf-benchmark-support\build-mlperf-inference-server-nvidia.md - - generate-mlperf-inference-submission: docs\MLPerf-benchmark-support\generate-mlperf-inference-submission.md - - generate-mlperf-inference-user-conf: docs\MLPerf-benchmark-support\generate-mlperf-inference-user-conf.md - - generate-mlperf-tiny-report: docs\MLPerf-benchmark-support\generate-mlperf-tiny-report.md - - generate-mlperf-tiny-submission: docs\MLPerf-benchmark-support\generate-mlperf-tiny-submission.md - - generate-nvidia-engine: docs\MLPerf-benchmark-support\generate-nvidia-engine.md - - get-mlperf-inference-intel-scratch-space: docs\MLPerf-benchmark-support\get-mlperf-inference-intel-scratch-space.md - - get-mlperf-inference-loadgen: docs\MLPerf-benchmark-support\get-mlperf-inference-loadgen.md - - get-mlperf-inference-nvidia-common-code: docs\MLPerf-benchmark-support\get-mlperf-inference-nvidia-common-code.md - - get-mlperf-inference-nvidia-scratch-space: 
docs\MLPerf-benchmark-support\get-mlperf-inference-nvidia-scratch-space.md - - get-mlperf-inference-results: docs\MLPerf-benchmark-support\get-mlperf-inference-results.md - - get-mlperf-inference-results-dir: docs\MLPerf-benchmark-support\get-mlperf-inference-results-dir.md - - get-mlperf-inference-src: docs\MLPerf-benchmark-support\get-mlperf-inference-src.md - - get-mlperf-inference-submission-dir: docs\MLPerf-benchmark-support\get-mlperf-inference-submission-dir.md - - get-mlperf-inference-sut-configs: docs\MLPerf-benchmark-support\get-mlperf-inference-sut-configs.md - - get-mlperf-inference-sut-description: docs\MLPerf-benchmark-support\get-mlperf-inference-sut-description.md - - get-mlperf-logging: docs\MLPerf-benchmark-support\get-mlperf-logging.md - - get-mlperf-power-dev: docs\MLPerf-benchmark-support\get-mlperf-power-dev.md - - get-mlperf-tiny-eembc-energy-runner-src: docs\MLPerf-benchmark-support\get-mlperf-tiny-eembc-energy-runner-src.md - - get-mlperf-tiny-src: docs\MLPerf-benchmark-support\get-mlperf-tiny-src.md - - get-mlperf-training-nvidia-code: docs\MLPerf-benchmark-support\get-mlperf-training-nvidia-code.md - - get-mlperf-training-src: docs\MLPerf-benchmark-support\get-mlperf-training-src.md - - get-nvidia-mitten: docs\MLPerf-benchmark-support\get-nvidia-mitten.md - - get-spec-ptd: docs\MLPerf-benchmark-support\get-spec-ptd.md - - import-mlperf-inference-to-experiment: docs\MLPerf-benchmark-support\import-mlperf-inference-to-experiment.md - - import-mlperf-tiny-to-experiment: docs\MLPerf-benchmark-support\import-mlperf-tiny-to-experiment.md - - import-mlperf-training-to-experiment: docs\MLPerf-benchmark-support\import-mlperf-training-to-experiment.md - - install-mlperf-logging-from-src: docs\MLPerf-benchmark-support\install-mlperf-logging-from-src.md - - prepare-training-data-bert: docs\MLPerf-benchmark-support\prepare-training-data-bert.md - - prepare-training-data-resnet: docs\MLPerf-benchmark-support\prepare-training-data-resnet.md - - 
preprocess-mlperf-inference-submission: docs\MLPerf-benchmark-support\preprocess-mlperf-inference-submission.md - - process-mlperf-accuracy: docs\MLPerf-benchmark-support\process-mlperf-accuracy.md - - push-mlperf-inference-results-to-github: docs\MLPerf-benchmark-support\push-mlperf-inference-results-to-github.md - - run-all-mlperf-models: docs\MLPerf-benchmark-support\run-all-mlperf-models.md - - run-mlperf-inference-mobilenet-models: docs\MLPerf-benchmark-support\run-mlperf-inference-mobilenet-models.md - - run-mlperf-inference-submission-checker: docs\MLPerf-benchmark-support\run-mlperf-inference-submission-checker.md - - run-mlperf-power-client: docs\MLPerf-benchmark-support\run-mlperf-power-client.md - - run-mlperf-power-server: docs\MLPerf-benchmark-support\run-mlperf-power-server.md - - run-mlperf-training-submission-checker: docs\MLPerf-benchmark-support\run-mlperf-training-submission-checker.md - - truncate-mlperf-inference-accuracy-log: docs\MLPerf-benchmark-support\truncate-mlperf-inference-accuracy-log.md + - add-custom-nvidia-system: MLPerf-benchmark-support\add-custom-nvidia-system.md + - benchmark-any-mlperf-inference-implementation: MLPerf-benchmark-support\benchmark-any-mlperf-inference-implementation.md + - build-mlperf-inference-server-nvidia: MLPerf-benchmark-support\build-mlperf-inference-server-nvidia.md + - generate-mlperf-inference-submission: MLPerf-benchmark-support\generate-mlperf-inference-submission.md + - generate-mlperf-inference-user-conf: MLPerf-benchmark-support\generate-mlperf-inference-user-conf.md + - generate-mlperf-tiny-report: MLPerf-benchmark-support\generate-mlperf-tiny-report.md + - generate-mlperf-tiny-submission: MLPerf-benchmark-support\generate-mlperf-tiny-submission.md + - generate-nvidia-engine: MLPerf-benchmark-support\generate-nvidia-engine.md + - get-mlperf-inference-intel-scratch-space: MLPerf-benchmark-support\get-mlperf-inference-intel-scratch-space.md + - get-mlperf-inference-loadgen: 
MLPerf-benchmark-support\get-mlperf-inference-loadgen.md + - get-mlperf-inference-nvidia-common-code: MLPerf-benchmark-support\get-mlperf-inference-nvidia-common-code.md + - get-mlperf-inference-nvidia-scratch-space: MLPerf-benchmark-support\get-mlperf-inference-nvidia-scratch-space.md + - get-mlperf-inference-results: MLPerf-benchmark-support\get-mlperf-inference-results.md + - get-mlperf-inference-results-dir: MLPerf-benchmark-support\get-mlperf-inference-results-dir.md + - get-mlperf-inference-src: MLPerf-benchmark-support\get-mlperf-inference-src.md + - get-mlperf-inference-submission-dir: MLPerf-benchmark-support\get-mlperf-inference-submission-dir.md + - get-mlperf-inference-sut-configs: MLPerf-benchmark-support\get-mlperf-inference-sut-configs.md + - get-mlperf-inference-sut-description: MLPerf-benchmark-support\get-mlperf-inference-sut-description.md + - get-mlperf-logging: MLPerf-benchmark-support\get-mlperf-logging.md + - get-mlperf-power-dev: MLPerf-benchmark-support\get-mlperf-power-dev.md + - get-mlperf-tiny-eembc-energy-runner-src: MLPerf-benchmark-support\get-mlperf-tiny-eembc-energy-runner-src.md + - get-mlperf-tiny-src: MLPerf-benchmark-support\get-mlperf-tiny-src.md + - get-mlperf-training-nvidia-code: MLPerf-benchmark-support\get-mlperf-training-nvidia-code.md + - get-mlperf-training-src: MLPerf-benchmark-support\get-mlperf-training-src.md + - get-nvidia-mitten: MLPerf-benchmark-support\get-nvidia-mitten.md + - get-spec-ptd: MLPerf-benchmark-support\get-spec-ptd.md + - import-mlperf-inference-to-experiment: MLPerf-benchmark-support\import-mlperf-inference-to-experiment.md + - import-mlperf-tiny-to-experiment: MLPerf-benchmark-support\import-mlperf-tiny-to-experiment.md + - import-mlperf-training-to-experiment: MLPerf-benchmark-support\import-mlperf-training-to-experiment.md + - install-mlperf-logging-from-src: MLPerf-benchmark-support\install-mlperf-logging-from-src.md + - prepare-training-data-bert: 
MLPerf-benchmark-support\prepare-training-data-bert.md + - prepare-training-data-resnet: MLPerf-benchmark-support\prepare-training-data-resnet.md + - preprocess-mlperf-inference-submission: MLPerf-benchmark-support\preprocess-mlperf-inference-submission.md + - process-mlperf-accuracy: MLPerf-benchmark-support\process-mlperf-accuracy.md + - push-mlperf-inference-results-to-github: MLPerf-benchmark-support\push-mlperf-inference-results-to-github.md + - run-all-mlperf-models: MLPerf-benchmark-support\run-all-mlperf-models.md + - run-mlperf-inference-mobilenet-models: MLPerf-benchmark-support\run-mlperf-inference-mobilenet-models.md + - run-mlperf-inference-submission-checker: MLPerf-benchmark-support\run-mlperf-inference-submission-checker.md + - run-mlperf-power-client: MLPerf-benchmark-support\run-mlperf-power-client.md + - run-mlperf-power-server: MLPerf-benchmark-support\run-mlperf-power-server.md + - run-mlperf-training-submission-checker: MLPerf-benchmark-support\run-mlperf-training-submission-checker.md + - truncate-mlperf-inference-accuracy-log: MLPerf-benchmark-support\truncate-mlperf-inference-accuracy-log.md - Modular AI-ML application pipeline: - - app-image-classification-onnx-py: docs\Modular-AI/ML-application-pipeline\app-image-classification-onnx-py.md - - app-image-classification-tf-onnx-cpp: docs\Modular-AI/ML-application-pipeline\app-image-classification-tf-onnx-cpp.md - - app-image-classification-torch-py: docs\Modular-AI/ML-application-pipeline\app-image-classification-torch-py.md - - app-image-classification-tvm-onnx-py: docs\Modular-AI/ML-application-pipeline\app-image-classification-tvm-onnx-py.md - - app-stable-diffusion-onnx-py: docs\Modular-AI/ML-application-pipeline\app-stable-diffusion-onnx-py.md + - app-image-classification-onnx-py: Modular-AI-ML-application-pipeline\app-image-classification-onnx-py.md + - app-image-classification-tf-onnx-cpp: Modular-AI-ML-application-pipeline\app-image-classification-tf-onnx-cpp.md + - 
app-image-classification-torch-py: Modular-AI-ML-application-pipeline\app-image-classification-torch-py.md + - app-image-classification-tvm-onnx-py: Modular-AI-ML-application-pipeline\app-image-classification-tvm-onnx-py.md + - app-stable-diffusion-onnx-py: Modular-AI-ML-application-pipeline\app-stable-diffusion-onnx-py.md - Modular application pipeline: - - app-image-corner-detection: docs\Modular-application-pipeline\app-image-corner-detection.md + - app-image-corner-detection: Modular-application-pipeline\app-image-corner-detection.md - Modular MLPerf inference benchmark pipeline: - - app-loadgen-generic-python: docs\Modular-MLPerf-inference-benchmark-pipeline\app-loadgen-generic-python.md - - app-mlperf-inference: docs\Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference.md - - app-mlperf-inference-ctuning-cpp-tflite: docs\Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference-ctuning-cpp-tflite.md - - app-mlperf-inference-mlcommons-cpp: docs\Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference-mlcommons-cpp.md - - app-mlperf-inference-mlcommons-python: docs\Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference-mlcommons-python.md - - benchmark-program-mlperf: docs\Modular-MLPerf-inference-benchmark-pipeline\benchmark-program-mlperf.md - - run-mlperf-inference-app: docs\Modular-MLPerf-inference-benchmark-pipeline\run-mlperf-inference-app.md + - app-loadgen-generic-python: Modular-MLPerf-inference-benchmark-pipeline\app-loadgen-generic-python.md + - app-mlperf-inference: Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference.md + - app-mlperf-inference-ctuning-cpp-tflite: Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference-ctuning-cpp-tflite.md + - app-mlperf-inference-mlcommons-cpp: Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference-mlcommons-cpp.md + - app-mlperf-inference-mlcommons-python: Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference-mlcommons-python.md + - 
benchmark-program-mlperf: Modular-MLPerf-inference-benchmark-pipeline\benchmark-program-mlperf.md + - run-mlperf-inference-app: Modular-MLPerf-inference-benchmark-pipeline\run-mlperf-inference-app.md - Modular MLPerf benchmarks: - - app-mlperf-inference-dummy: docs\Modular-MLPerf-benchmarks\app-mlperf-inference-dummy.md - - app-mlperf-inference-intel: docs\Modular-MLPerf-benchmarks\app-mlperf-inference-intel.md - - app-mlperf-inference-qualcomm: docs\Modular-MLPerf-benchmarks\app-mlperf-inference-qualcomm.md + - app-mlperf-inference-dummy: Modular-MLPerf-benchmarks\app-mlperf-inference-dummy.md + - app-mlperf-inference-intel: Modular-MLPerf-benchmarks\app-mlperf-inference-intel.md + - app-mlperf-inference-qualcomm: Modular-MLPerf-benchmarks\app-mlperf-inference-qualcomm.md - Reproduce MLPerf benchmarks: - - app-mlperf-inference-nvidia: docs\Reproduce-MLPerf-benchmarks\app-mlperf-inference-nvidia.md - - reproduce-mlperf-octoml-tinyml-results: docs\Reproduce-MLPerf-benchmarks\reproduce-mlperf-octoml-tinyml-results.md - - reproduce-mlperf-training-nvidia: docs\Reproduce-MLPerf-benchmarks\reproduce-mlperf-training-nvidia.md - - wrapper-reproduce-octoml-tinyml-submission: docs\Reproduce-MLPerf-benchmarks\wrapper-reproduce-octoml-tinyml-submission.md + - app-mlperf-inference-nvidia: Reproduce-MLPerf-benchmarks\app-mlperf-inference-nvidia.md + - reproduce-mlperf-octoml-tinyml-results: Reproduce-MLPerf-benchmarks\reproduce-mlperf-octoml-tinyml-results.md + - reproduce-mlperf-training-nvidia: Reproduce-MLPerf-benchmarks\reproduce-mlperf-training-nvidia.md + - wrapper-reproduce-octoml-tinyml-submission: Reproduce-MLPerf-benchmarks\wrapper-reproduce-octoml-tinyml-submission.md - Modular MLPerf training benchmark pipeline: - - app-mlperf-training-nvidia: docs\Modular-MLPerf-training-benchmark-pipeline\app-mlperf-training-nvidia.md - - app-mlperf-training-reference: docs\Modular-MLPerf-training-benchmark-pipeline\app-mlperf-training-reference.md + - app-mlperf-training-nvidia: 
Modular-MLPerf-training-benchmark-pipeline\app-mlperf-training-nvidia.md + - app-mlperf-training-reference: Modular-MLPerf-training-benchmark-pipeline\app-mlperf-training-reference.md - DevOps automation: - - benchmark-program: docs\DevOps-automation\benchmark-program.md - - compile-program: docs\DevOps-automation\compile-program.md - - convert-csv-to-md: docs\DevOps-automation\convert-csv-to-md.md - - copy-to-clipboard: docs\DevOps-automation\copy-to-clipboard.md - - create-conda-env: docs\DevOps-automation\create-conda-env.md - - create-patch: docs\DevOps-automation\create-patch.md - - detect-sudo: docs\DevOps-automation\detect-sudo.md - - download-and-extract: docs\DevOps-automation\download-and-extract.md - - download-file: docs\DevOps-automation\download-file.md - - download-torrent: docs\DevOps-automation\download-torrent.md - - extract-file: docs\DevOps-automation\extract-file.md - - fail: docs\DevOps-automation\fail.md - - get-conda: docs\DevOps-automation\get-conda.md - - get-git-repo: docs\DevOps-automation\get-git-repo.md - - get-github-cli: docs\DevOps-automation\get-github-cli.md - - pull-git-repo: docs\DevOps-automation\pull-git-repo.md - - push-csv-to-spreadsheet: docs\DevOps-automation\push-csv-to-spreadsheet.md - - set-device-settings-qaic: docs\DevOps-automation\set-device-settings-qaic.md - - set-echo-off-win: docs\DevOps-automation\set-echo-off-win.md - - set-performance-mode: docs\DevOps-automation\set-performance-mode.md - - set-sqlite-dir: docs\DevOps-automation\set-sqlite-dir.md - - tar-my-folder: docs\DevOps-automation\tar-my-folder.md + - benchmark-program: DevOps-automation\benchmark-program.md + - compile-program: DevOps-automation\compile-program.md + - convert-csv-to-md: DevOps-automation\convert-csv-to-md.md + - copy-to-clipboard: DevOps-automation\copy-to-clipboard.md + - create-conda-env: DevOps-automation\create-conda-env.md + - create-patch: DevOps-automation\create-patch.md + - detect-sudo: DevOps-automation\detect-sudo.md + - 
download-and-extract: DevOps-automation\download-and-extract.md + - download-file: DevOps-automation\download-file.md + - download-torrent: DevOps-automation\download-torrent.md + - extract-file: DevOps-automation\extract-file.md + - fail: DevOps-automation\fail.md + - get-conda: DevOps-automation\get-conda.md + - get-git-repo: DevOps-automation\get-git-repo.md + - get-github-cli: DevOps-automation\get-github-cli.md + - pull-git-repo: DevOps-automation\pull-git-repo.md + - push-csv-to-spreadsheet: DevOps-automation\push-csv-to-spreadsheet.md + - set-device-settings-qaic: DevOps-automation\set-device-settings-qaic.md + - set-echo-off-win: DevOps-automation\set-echo-off-win.md + - set-performance-mode: DevOps-automation\set-performance-mode.md + - set-sqlite-dir: DevOps-automation\set-sqlite-dir.md + - tar-my-folder: DevOps-automation\tar-my-folder.md - Docker automation: - - build-docker-image: docs\Docker-automation\build-docker-image.md - - build-dockerfile: docs\Docker-automation\build-dockerfile.md - - prune-docker: docs\Docker-automation\prune-docker.md - - run-docker-container: docs\Docker-automation\run-docker-container.md + - build-docker-image: Docker-automation\build-docker-image.md + - build-dockerfile: Docker-automation\build-dockerfile.md + - prune-docker: Docker-automation\prune-docker.md + - run-docker-container: Docker-automation\run-docker-container.md - AI-ML optimization: - - calibrate-model-for.qaic: docs\AI/ML-optimization\calibrate-model-for.qaic.md - - compile-model-for.qaic: docs\AI/ML-optimization\compile-model-for.qaic.md - - prune-bert-models: docs\AI/ML-optimization\prune-bert-models.md + - calibrate-model-for.qaic: AI-ML-optimization\calibrate-model-for.qaic.md + - compile-model-for.qaic: AI-ML-optimization\compile-model-for.qaic.md + - prune-bert-models: AI-ML-optimization\prune-bert-models.md - AI-ML models: - - convert-ml-model-huggingface-to-onnx: docs\AI/ML-models\convert-ml-model-huggingface-to-onnx.md - - get-bert-squad-vocab: 
docs\AI/ML-models\get-bert-squad-vocab.md - - get-dlrm: docs\AI/ML-models\get-dlrm.md - - get-ml-model-3d-unet-kits19: docs\AI/ML-models\get-ml-model-3d-unet-kits19.md - - get-ml-model-bert-base-squad: docs\AI/ML-models\get-ml-model-bert-base-squad.md - - get-ml-model-bert-large-squad: docs\AI/ML-models\get-ml-model-bert-large-squad.md - - get-ml-model-dlrm-terabyte: docs\AI/ML-models\get-ml-model-dlrm-terabyte.md - - get-ml-model-efficientnet-lite: docs\AI/ML-models\get-ml-model-efficientnet-lite.md - - get-ml-model-gptj: docs\AI/ML-models\get-ml-model-gptj.md - - get-ml-model-huggingface-zoo: docs\AI/ML-models\get-ml-model-huggingface-zoo.md - - get-ml-model-llama2: docs\AI/ML-models\get-ml-model-llama2.md - - get-ml-model-mobilenet: docs\AI/ML-models\get-ml-model-mobilenet.md - - get-ml-model-neuralmagic-zoo: docs\AI/ML-models\get-ml-model-neuralmagic-zoo.md - - get-ml-model-resnet50: docs\AI/ML-models\get-ml-model-resnet50.md - - get-ml-model-retinanet: docs\AI/ML-models\get-ml-model-retinanet.md - - get-ml-model-retinanet-nvidia: docs\AI/ML-models\get-ml-model-retinanet-nvidia.md - - get-ml-model-rnnt: docs\AI/ML-models\get-ml-model-rnnt.md - - get-ml-model-stable-diffusion: docs\AI/ML-models\get-ml-model-stable-diffusion.md - - get-ml-model-tiny-resnet: docs\AI/ML-models\get-ml-model-tiny-resnet.md - - get-ml-model-using-imagenet-from-model-zoo: docs\AI/ML-models\get-ml-model-using-imagenet-from-model-zoo.md - - get-tvm-model: docs\AI/ML-models\get-tvm-model.md + - convert-ml-model-huggingface-to-onnx: AI-ML-models\convert-ml-model-huggingface-to-onnx.md + - get-bert-squad-vocab: AI-ML-models\get-bert-squad-vocab.md + - get-dlrm: AI-ML-models\get-dlrm.md + - get-ml-model-3d-unet-kits19: AI-ML-models\get-ml-model-3d-unet-kits19.md + - get-ml-model-bert-base-squad: AI-ML-models\get-ml-model-bert-base-squad.md + - get-ml-model-bert-large-squad: AI-ML-models\get-ml-model-bert-large-squad.md + - get-ml-model-dlrm-terabyte: 
AI-ML-models\get-ml-model-dlrm-terabyte.md + - get-ml-model-efficientnet-lite: AI-ML-models\get-ml-model-efficientnet-lite.md + - get-ml-model-gptj: AI-ML-models\get-ml-model-gptj.md + - get-ml-model-huggingface-zoo: AI-ML-models\get-ml-model-huggingface-zoo.md + - get-ml-model-llama2: AI-ML-models\get-ml-model-llama2.md + - get-ml-model-mobilenet: AI-ML-models\get-ml-model-mobilenet.md + - get-ml-model-neuralmagic-zoo: AI-ML-models\get-ml-model-neuralmagic-zoo.md + - get-ml-model-resnet50: AI-ML-models\get-ml-model-resnet50.md + - get-ml-model-retinanet: AI-ML-models\get-ml-model-retinanet.md + - get-ml-model-retinanet-nvidia: AI-ML-models\get-ml-model-retinanet-nvidia.md + - get-ml-model-rnnt: AI-ML-models\get-ml-model-rnnt.md + - get-ml-model-stable-diffusion: AI-ML-models\get-ml-model-stable-diffusion.md + - get-ml-model-tiny-resnet: AI-ML-models\get-ml-model-tiny-resnet.md + - get-ml-model-using-imagenet-from-model-zoo: AI-ML-models\get-ml-model-using-imagenet-from-model-zoo.md + - get-tvm-model: AI-ML-models\get-tvm-model.md - CM automation: - - create-custom-cache-entry: docs\CM-automation\create-custom-cache-entry.md + - create-custom-cache-entry: CM-automation\create-custom-cache-entry.md - TinyML automation: - - create-fpgaconvnet-app-tinyml: docs\TinyML-automation\create-fpgaconvnet-app-tinyml.md - - create-fpgaconvnet-config-tinyml: docs\TinyML-automation\create-fpgaconvnet-config-tinyml.md - - flash-tinyml-binary: docs\TinyML-automation\flash-tinyml-binary.md - - get-microtvm: docs\TinyML-automation\get-microtvm.md - - get-zephyr: docs\TinyML-automation\get-zephyr.md - - get-zephyr-sdk: docs\TinyML-automation\get-zephyr-sdk.md + - create-fpgaconvnet-app-tinyml: TinyML-automation\create-fpgaconvnet-app-tinyml.md + - create-fpgaconvnet-config-tinyml: TinyML-automation\create-fpgaconvnet-config-tinyml.md + - flash-tinyml-binary: TinyML-automation\flash-tinyml-binary.md + - get-microtvm: TinyML-automation\get-microtvm.md + - get-zephyr: 
TinyML-automation\get-zephyr.md + - get-zephyr-sdk: TinyML-automation\get-zephyr-sdk.md - Cloud automation: - - destroy-terraform: docs\Cloud-automation\destroy-terraform.md - - get-aws-cli: docs\Cloud-automation\get-aws-cli.md - - get-terraform: docs\Cloud-automation\get-terraform.md - - install-aws-cli: docs\Cloud-automation\install-aws-cli.md - - install-terraform-from-src: docs\Cloud-automation\install-terraform-from-src.md - - run-terraform: docs\Cloud-automation\run-terraform.md + - destroy-terraform: Cloud-automation\destroy-terraform.md + - get-aws-cli: Cloud-automation\get-aws-cli.md + - get-terraform: Cloud-automation\get-terraform.md + - install-aws-cli: Cloud-automation\install-aws-cli.md + - install-terraform-from-src: Cloud-automation\install-terraform-from-src.md + - run-terraform: Cloud-automation\run-terraform.md - Platform information: - - detect-cpu: docs\Platform-information\detect-cpu.md - - detect-os: docs\Platform-information\detect-os.md + - detect-cpu: Platform-information\detect-cpu.md + - detect-os: Platform-information\detect-os.md - Detection or installation of tools and artifacts: - - get-android-sdk: docs\Detection-or-installation-of-tools-and-artifacts\get-android-sdk.md - - get-aria2: docs\Detection-or-installation-of-tools-and-artifacts\get-aria2.md - - get-bazel: docs\Detection-or-installation-of-tools-and-artifacts\get-bazel.md - - get-blis: docs\Detection-or-installation-of-tools-and-artifacts\get-blis.md - - get-brew: docs\Detection-or-installation-of-tools-and-artifacts\get-brew.md - - get-cmake: docs\Detection-or-installation-of-tools-and-artifacts\get-cmake.md - - get-cmsis_5: docs\Detection-or-installation-of-tools-and-artifacts\get-cmsis_5.md - - get-docker: docs\Detection-or-installation-of-tools-and-artifacts\get-docker.md - - get-generic-sys-util: docs\Detection-or-installation-of-tools-and-artifacts\get-generic-sys-util.md - - get-google-test: docs\Detection-or-installation-of-tools-and-artifacts\get-google-test.md - - 
get-java: docs\Detection-or-installation-of-tools-and-artifacts\get-java.md - - get-javac: docs\Detection-or-installation-of-tools-and-artifacts\get-javac.md - - get-lib-armnn: docs\Detection-or-installation-of-tools-and-artifacts\get-lib-armnn.md - - get-lib-dnnl: docs\Detection-or-installation-of-tools-and-artifacts\get-lib-dnnl.md - - get-lib-protobuf: docs\Detection-or-installation-of-tools-and-artifacts\get-lib-protobuf.md - - get-lib-qaic-api: docs\Detection-or-installation-of-tools-and-artifacts\get-lib-qaic-api.md - - get-nvidia-docker: docs\Detection-or-installation-of-tools-and-artifacts\get-nvidia-docker.md - - get-openssl: docs\Detection-or-installation-of-tools-and-artifacts\get-openssl.md - - get-rclone: docs\Detection-or-installation-of-tools-and-artifacts\get-rclone.md - - get-sys-utils-cm: docs\Detection-or-installation-of-tools-and-artifacts\get-sys-utils-cm.md - - get-sys-utils-min: docs\Detection-or-installation-of-tools-and-artifacts\get-sys-utils-min.md - - get-xilinx-sdk: docs\Detection-or-installation-of-tools-and-artifacts\get-xilinx-sdk.md - - get-zendnn: docs\Detection-or-installation-of-tools-and-artifacts\get-zendnn.md - - install-bazel: docs\Detection-or-installation-of-tools-and-artifacts\install-bazel.md - - install-cmake-prebuilt: docs\Detection-or-installation-of-tools-and-artifacts\install-cmake-prebuilt.md - - install-gflags: docs\Detection-or-installation-of-tools-and-artifacts\install-gflags.md - - install-github-cli: docs\Detection-or-installation-of-tools-and-artifacts\install-github-cli.md - - install-numactl-from-src: docs\Detection-or-installation-of-tools-and-artifacts\install-numactl-from-src.md - - install-openssl: docs\Detection-or-installation-of-tools-and-artifacts\install-openssl.md + - get-android-sdk: Detection-or-installation-of-tools-and-artifacts\get-android-sdk.md + - get-aria2: Detection-or-installation-of-tools-and-artifacts\get-aria2.md + - get-bazel: 
Detection-or-installation-of-tools-and-artifacts\get-bazel.md + - get-blis: Detection-or-installation-of-tools-and-artifacts\get-blis.md + - get-brew: Detection-or-installation-of-tools-and-artifacts\get-brew.md + - get-cmake: Detection-or-installation-of-tools-and-artifacts\get-cmake.md + - get-cmsis_5: Detection-or-installation-of-tools-and-artifacts\get-cmsis_5.md + - get-docker: Detection-or-installation-of-tools-and-artifacts\get-docker.md + - get-generic-sys-util: Detection-or-installation-of-tools-and-artifacts\get-generic-sys-util.md + - get-google-test: Detection-or-installation-of-tools-and-artifacts\get-google-test.md + - get-java: Detection-or-installation-of-tools-and-artifacts\get-java.md + - get-javac: Detection-or-installation-of-tools-and-artifacts\get-javac.md + - get-lib-armnn: Detection-or-installation-of-tools-and-artifacts\get-lib-armnn.md + - get-lib-dnnl: Detection-or-installation-of-tools-and-artifacts\get-lib-dnnl.md + - get-lib-protobuf: Detection-or-installation-of-tools-and-artifacts\get-lib-protobuf.md + - get-lib-qaic-api: Detection-or-installation-of-tools-and-artifacts\get-lib-qaic-api.md + - get-nvidia-docker: Detection-or-installation-of-tools-and-artifacts\get-nvidia-docker.md + - get-openssl: Detection-or-installation-of-tools-and-artifacts\get-openssl.md + - get-rclone: Detection-or-installation-of-tools-and-artifacts\get-rclone.md + - get-sys-utils-cm: Detection-or-installation-of-tools-and-artifacts\get-sys-utils-cm.md + - get-sys-utils-min: Detection-or-installation-of-tools-and-artifacts\get-sys-utils-min.md + - get-xilinx-sdk: Detection-or-installation-of-tools-and-artifacts\get-xilinx-sdk.md + - get-zendnn: Detection-or-installation-of-tools-and-artifacts\get-zendnn.md + - install-bazel: Detection-or-installation-of-tools-and-artifacts\install-bazel.md + - install-cmake-prebuilt: Detection-or-installation-of-tools-and-artifacts\install-cmake-prebuilt.md + - install-gflags: 
Detection-or-installation-of-tools-and-artifacts\install-gflags.md + - install-github-cli: Detection-or-installation-of-tools-and-artifacts\install-github-cli.md + - install-numactl-from-src: Detection-or-installation-of-tools-and-artifacts\install-numactl-from-src.md + - install-openssl: Detection-or-installation-of-tools-and-artifacts\install-openssl.md - Compiler automation: - - get-aocl: docs\Compiler-automation\get-aocl.md - - get-cl: docs\Compiler-automation\get-cl.md - - get-compiler-flags: docs\Compiler-automation\get-compiler-flags.md - - get-compiler-rust: docs\Compiler-automation\get-compiler-rust.md - - get-gcc: docs\Compiler-automation\get-gcc.md - - get-go: docs\Compiler-automation\get-go.md - - get-llvm: docs\Compiler-automation\get-llvm.md - - install-gcc-src: docs\Compiler-automation\install-gcc-src.md - - install-ipex-from-src: docs\Compiler-automation\install-ipex-from-src.md - - install-llvm-prebuilt: docs\Compiler-automation\install-llvm-prebuilt.md - - install-llvm-src: docs\Compiler-automation\install-llvm-src.md - - install-onednn-from-src: docs\Compiler-automation\install-onednn-from-src.md - - install-onnxruntime-from-src: docs\Compiler-automation\install-onnxruntime-from-src.md - - install-pytorch-from-src: docs\Compiler-automation\install-pytorch-from-src.md - - install-pytorch-kineto-from-src: docs\Compiler-automation\install-pytorch-kineto-from-src.md - - install-torchvision-from-src: docs\Compiler-automation\install-torchvision-from-src.md - - install-tpp-pytorch-extension: docs\Compiler-automation\install-tpp-pytorch-extension.md - - install-transformers-from-src: docs\Compiler-automation\install-transformers-from-src.md + - get-aocl: Compiler-automation\get-aocl.md + - get-cl: Compiler-automation\get-cl.md + - get-compiler-flags: Compiler-automation\get-compiler-flags.md + - get-compiler-rust: Compiler-automation\get-compiler-rust.md + - get-gcc: Compiler-automation\get-gcc.md + - get-go: Compiler-automation\get-go.md + - get-llvm: 
Compiler-automation\get-llvm.md + - install-gcc-src: Compiler-automation\install-gcc-src.md + - install-ipex-from-src: Compiler-automation\install-ipex-from-src.md + - install-llvm-prebuilt: Compiler-automation\install-llvm-prebuilt.md + - install-llvm-src: Compiler-automation\install-llvm-src.md + - install-onednn-from-src: Compiler-automation\install-onednn-from-src.md + - install-onnxruntime-from-src: Compiler-automation\install-onnxruntime-from-src.md + - install-pytorch-from-src: Compiler-automation\install-pytorch-from-src.md + - install-pytorch-kineto-from-src: Compiler-automation\install-pytorch-kineto-from-src.md + - install-torchvision-from-src: Compiler-automation\install-torchvision-from-src.md + - install-tpp-pytorch-extension: Compiler-automation\install-tpp-pytorch-extension.md + - install-transformers-from-src: Compiler-automation\install-transformers-from-src.md - CM Interface: - - get-cache-dir: docs\CM-Interface\get-cache-dir.md + - get-cache-dir: CM-Interface\get-cache-dir.md - Legacy CK support: - - get-ck: docs\Legacy-CK-support\get-ck.md - - get-ck-repo-mlops: docs\Legacy-CK-support\get-ck-repo-mlops.md + - get-ck: Legacy-CK-support\get-ck.md + - get-ck-repo-mlops: Legacy-CK-support\get-ck-repo-mlops.md - AI-ML datasets: - - get-croissant: docs\AI/ML-datasets\get-croissant.md - - get-dataset-cifar10: docs\AI/ML-datasets\get-dataset-cifar10.md - - get-dataset-cnndm: docs\AI/ML-datasets\get-dataset-cnndm.md - - get-dataset-coco: docs\AI/ML-datasets\get-dataset-coco.md - - get-dataset-coco2014: docs\AI/ML-datasets\get-dataset-coco2014.md - - get-dataset-criteo: docs\AI/ML-datasets\get-dataset-criteo.md - - get-dataset-imagenet-aux: docs\AI/ML-datasets\get-dataset-imagenet-aux.md - - get-dataset-imagenet-calibration: docs\AI/ML-datasets\get-dataset-imagenet-calibration.md - - get-dataset-imagenet-helper: docs\AI/ML-datasets\get-dataset-imagenet-helper.md - - get-dataset-imagenet-train: docs\AI/ML-datasets\get-dataset-imagenet-train.md - - 
get-dataset-imagenet-val: docs\AI/ML-datasets\get-dataset-imagenet-val.md - - get-dataset-kits19: docs\AI/ML-datasets\get-dataset-kits19.md - - get-dataset-librispeech: docs\AI/ML-datasets\get-dataset-librispeech.md - - get-dataset-openimages: docs\AI/ML-datasets\get-dataset-openimages.md - - get-dataset-openimages-annotations: docs\AI/ML-datasets\get-dataset-openimages-annotations.md - - get-dataset-openimages-calibration: docs\AI/ML-datasets\get-dataset-openimages-calibration.md - - get-dataset-openorca: docs\AI/ML-datasets\get-dataset-openorca.md - - get-dataset-squad: docs\AI/ML-datasets\get-dataset-squad.md - - get-dataset-squad-vocab: docs\AI/ML-datasets\get-dataset-squad-vocab.md - - get-preprocessed-dataset-criteo: docs\AI/ML-datasets\get-preprocessed-dataset-criteo.md - - get-preprocessed-dataset-generic: docs\AI/ML-datasets\get-preprocessed-dataset-generic.md - - get-preprocessed-dataset-imagenet: docs\AI/ML-datasets\get-preprocessed-dataset-imagenet.md - - get-preprocessed-dataset-kits19: docs\AI/ML-datasets\get-preprocessed-dataset-kits19.md - - get-preprocessed-dataset-librispeech: docs\AI/ML-datasets\get-preprocessed-dataset-librispeech.md - - get-preprocessed-dataset-openimages: docs\AI/ML-datasets\get-preprocessed-dataset-openimages.md - - get-preprocessed-dataset-openorca: docs\AI/ML-datasets\get-preprocessed-dataset-openorca.md - - get-preprocessed-dataset-squad: docs\AI/ML-datasets\get-preprocessed-dataset-squad.md + - get-croissant: AI-ML-datasets\get-croissant.md + - get-dataset-cifar10: AI-ML-datasets\get-dataset-cifar10.md + - get-dataset-cnndm: AI-ML-datasets\get-dataset-cnndm.md + - get-dataset-coco: AI-ML-datasets\get-dataset-coco.md + - get-dataset-coco2014: AI-ML-datasets\get-dataset-coco2014.md + - get-dataset-criteo: AI-ML-datasets\get-dataset-criteo.md + - get-dataset-imagenet-aux: AI-ML-datasets\get-dataset-imagenet-aux.md + - get-dataset-imagenet-calibration: AI-ML-datasets\get-dataset-imagenet-calibration.md + - 
get-dataset-imagenet-helper: AI-ML-datasets\get-dataset-imagenet-helper.md + - get-dataset-imagenet-train: AI-ML-datasets\get-dataset-imagenet-train.md + - get-dataset-imagenet-val: AI-ML-datasets\get-dataset-imagenet-val.md + - get-dataset-kits19: AI-ML-datasets\get-dataset-kits19.md + - get-dataset-librispeech: AI-ML-datasets\get-dataset-librispeech.md + - get-dataset-openimages: AI-ML-datasets\get-dataset-openimages.md + - get-dataset-openimages-annotations: AI-ML-datasets\get-dataset-openimages-annotations.md + - get-dataset-openimages-calibration: AI-ML-datasets\get-dataset-openimages-calibration.md + - get-dataset-openorca: AI-ML-datasets\get-dataset-openorca.md + - get-dataset-squad: AI-ML-datasets\get-dataset-squad.md + - get-dataset-squad-vocab: AI-ML-datasets\get-dataset-squad-vocab.md + - get-preprocessed-dataset-criteo: AI-ML-datasets\get-preprocessed-dataset-criteo.md + - get-preprocessed-dataset-generic: AI-ML-datasets\get-preprocessed-dataset-generic.md + - get-preprocessed-dataset-imagenet: AI-ML-datasets\get-preprocessed-dataset-imagenet.md + - get-preprocessed-dataset-kits19: AI-ML-datasets\get-preprocessed-dataset-kits19.md + - get-preprocessed-dataset-librispeech: AI-ML-datasets\get-preprocessed-dataset-librispeech.md + - get-preprocessed-dataset-openimages: AI-ML-datasets\get-preprocessed-dataset-openimages.md + - get-preprocessed-dataset-openorca: AI-ML-datasets\get-preprocessed-dataset-openorca.md + - get-preprocessed-dataset-squad: AI-ML-datasets\get-preprocessed-dataset-squad.md - CUDA automation: - - get-cuda: docs\CUDA-automation\get-cuda.md - - get-cuda-devices: docs\CUDA-automation\get-cuda-devices.md - - get-cudnn: docs\CUDA-automation\get-cudnn.md - - get-tensorrt: docs\CUDA-automation\get-tensorrt.md - - install-cuda-package-manager: docs\CUDA-automation\install-cuda-package-manager.md - - install-cuda-prebuilt: docs\CUDA-automation\install-cuda-prebuilt.md + - get-cuda: CUDA-automation\get-cuda.md + - get-cuda-devices: 
CUDA-automation\get-cuda-devices.md + - get-cudnn: CUDA-automation\get-cudnn.md + - get-tensorrt: CUDA-automation\get-tensorrt.md + - install-cuda-package-manager: CUDA-automation\install-cuda-package-manager.md + - install-cuda-prebuilt: CUDA-automation\install-cuda-prebuilt.md - AI-ML frameworks: - - get-google-saxml: docs\AI/ML-frameworks\get-google-saxml.md - - get-onnxruntime-prebuilt: docs\AI/ML-frameworks\get-onnxruntime-prebuilt.md - - get-qaic-apps-sdk: docs\AI/ML-frameworks\get-qaic-apps-sdk.md - - get-qaic-platform-sdk: docs\AI/ML-frameworks\get-qaic-platform-sdk.md - - get-qaic-software-kit: docs\AI/ML-frameworks\get-qaic-software-kit.md - - get-rocm: docs\AI/ML-frameworks\get-rocm.md - - get-tvm: docs\AI/ML-frameworks\get-tvm.md - - install-qaic-compute-sdk-from-src: docs\AI/ML-frameworks\install-qaic-compute-sdk-from-src.md - - install-rocm: docs\AI/ML-frameworks\install-rocm.md - - install-tensorflow-for-c: docs\AI/ML-frameworks\install-tensorflow-for-c.md - - install-tensorflow-from-src: docs\AI/ML-frameworks\install-tensorflow-from-src.md - - install-tflite-from-src: docs\AI/ML-frameworks\install-tflite-from-src.md + - get-google-saxml: AI-ML-frameworks\get-google-saxml.md + - get-onnxruntime-prebuilt: AI-ML-frameworks\get-onnxruntime-prebuilt.md + - get-qaic-apps-sdk: AI-ML-frameworks\get-qaic-apps-sdk.md + - get-qaic-platform-sdk: AI-ML-frameworks\get-qaic-platform-sdk.md + - get-qaic-software-kit: AI-ML-frameworks\get-qaic-software-kit.md + - get-rocm: AI-ML-frameworks\get-rocm.md + - get-tvm: AI-ML-frameworks\get-tvm.md + - install-qaic-compute-sdk-from-src: AI-ML-frameworks\install-qaic-compute-sdk-from-src.md + - install-rocm: AI-ML-frameworks\install-rocm.md + - install-tensorflow-for-c: AI-ML-frameworks\install-tensorflow-for-c.md + - install-tensorflow-from-src: AI-ML-frameworks\install-tensorflow-from-src.md + - install-tflite-from-src: AI-ML-frameworks\install-tflite-from-src.md - Reproducibility and artifact evaluation: - - 
get-ipol-src: docs\Reproducibility-and-artifact-evaluation\get-ipol-src.md - - process-ae-users: docs\Reproducibility-and-artifact-evaluation\process-ae-users.md - - reproduce-ipol-paper-2022-439: docs\Reproducibility-and-artifact-evaluation\reproduce-ipol-paper-2022-439.md - - reproduce-micro-paper-2023-victima: docs\Reproducibility-and-artifact-evaluation\reproduce-micro-paper-2023-victima.md + - get-ipol-src: Reproducibility-and-artifact-evaluation\get-ipol-src.md + - process-ae-users: Reproducibility-and-artifact-evaluation\process-ae-users.md + - reproduce-ipol-paper-2022-439: Reproducibility-and-artifact-evaluation\reproduce-ipol-paper-2022-439.md + - reproduce-micro-paper-2023-victima: Reproducibility-and-artifact-evaluation\reproduce-micro-paper-2023-victima.md - GUI: - - gui: docs\GUI\gui.md + - gui: GUI\gui.md - Collective benchmarking: - - launch-benchmark: docs\Collective-benchmarking\launch-benchmark.md + - launch-benchmark: Collective-benchmarking\launch-benchmark.md - Tests: - - print-any-text: docs\Tests\print-any-text.md - - print-croissant-desc: docs\Tests\print-croissant-desc.md - - print-hello-world: docs\Tests\print-hello-world.md - - print-hello-world-java: docs\Tests\print-hello-world-java.md - - print-hello-world-javac: docs\Tests\print-hello-world-javac.md - - print-hello-world-py: docs\Tests\print-hello-world-py.md - - print-python-version: docs\Tests\print-python-version.md - - run-python: docs\Tests\run-python.md - - test-cm-core: docs\Tests\test-cm-core.md - - test-cm-script-pipeline: docs\Tests\test-cm-script-pipeline.md - - test-deps-conditions: docs\Tests\test-deps-conditions.md - - test-deps-conditions2: docs\Tests\test-deps-conditions2.md - - test-download-and-extract-artifacts: docs\Tests\test-download-and-extract-artifacts.md - - test-set-sys-user-cm: docs\Tests\test-set-sys-user-cm.md - - upgrade-python-pip: docs\Tests\upgrade-python-pip.md + - print-any-text: Tests\print-any-text.md + - print-croissant-desc: 
Tests\print-croissant-desc.md + - print-hello-world: Tests\print-hello-world.md + - print-hello-world-java: Tests\print-hello-world-java.md + - print-hello-world-javac: Tests\print-hello-world-javac.md + - print-hello-world-py: Tests\print-hello-world-py.md + - print-python-version: Tests\print-python-version.md + - run-python: Tests\run-python.md + - test-cm-core: Tests\test-cm-core.md + - test-cm-script-pipeline: Tests\test-cm-script-pipeline.md + - test-deps-conditions: Tests\test-deps-conditions.md + - test-deps-conditions2: Tests\test-deps-conditions2.md + - test-download-and-extract-artifacts: Tests\test-download-and-extract-artifacts.md + - test-set-sys-user-cm: Tests\test-set-sys-user-cm.md + - upgrade-python-pip: Tests\upgrade-python-pip.md - Dashboard automation: - - publish-results-to-dashboard: docs\Dashboard-automation\publish-results-to-dashboard.md + - publish-results-to-dashboard: Dashboard-automation\publish-results-to-dashboard.md - Remote automation: - - remote-run-commands: docs\Remote-automation\remote-run-commands.md + - remote-run-commands: Remote-automation\remote-run-commands.md - CM interface prototyping: - - test-debug: docs\CM-interface-prototyping\test-debug.md - - test-mlperf-inference-retinanet: docs\CM-interface-prototyping\test-mlperf-inference-retinanet.md + - test-debug: CM-interface-prototyping\test-debug.md + - test-mlperf-inference-retinanet: CM-interface-prototyping\test-mlperf-inference-retinanet.md markdown_extensions: - pymdownx.tasklist: diff --git a/mkdocsHelper.py b/mkdocsHelper.py index acaf95e2f..a797bcf7d 100644 --- a/mkdocsHelper.py +++ b/mkdocsHelper.py @@ -72,9 +72,9 @@ def print_category_structure(category_dict): print(f"Failed to create symlink: {e}") print(" - CM Scripts:") for category, folders in category_dict.items(): - category_path = os.path.join("docs", category) - category_path_formated = category_path.replace("/", "-") - category_path_formated = category_path.replace(" ", "-") + # category_path = 
os.path.join("docs", category) + category_path_formated = category.replace("/", "-") + category_path_formated = category_path_formated.replace(" ", "-") print(f" - {category.replace("/", "-")}:") for folder in folders: folder_name = folder.replace("/", "-") From 718fd800b7d366a04197335edc26e26a0eeffde7 Mon Sep 17 00:00:00 2001 From: ANANDHU S <71482562+anandhu-eng@users.noreply.github.com> Date: Fri, 24 May 2024 23:06:48 +0530 Subject: [PATCH 5/7] Rearranged folder as per github actions --- .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../{get-rocm.md => get-rocm/index.md} | 0 .../{get-tvm.md => get-tvm/index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../{get-dlrm.md => get-dlrm/index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../{get-cuda.md => get-cuda/index.md} | 0 .../{get-cudnn.md => get-cudnn/index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../{get-aws-cli.md => get-aws-cli/index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 
0 .../index.md} | 0 .../index.md} | 0 .../{get-aocl.md => get-aocl/index.md} | 0 .../{get-cl.md => get-cl/index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../{get-gcc.md => get-gcc/index.md} | 0 .../{get-go.md => get-go/index.md} | 0 .../{get-llvm.md => get-llvm/index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../{get-aria2.md => get-aria2/index.md} | 0 .../{get-bazel.md => get-bazel/index.md} | 0 .../{get-blis.md => get-blis/index.md} | 0 .../{get-brew.md => get-brew/index.md} | 0 .../{get-cmake.md => get-cmake/index.md} | 0 .../{get-cmsis_5.md => get-cmsis_5/index.md} | 0 .../{get-docker.md => get-docker/index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../{get-java.md => get-java/index.md} | 0 .../{get-javac.md => get-javac/index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../{get-openssl.md => get-openssl/index.md} | 0 .../{get-rclone.md => get-rclone/index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../{get-zendnn.md => get-zendnn/index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../{detect-sudo.md => detect-sudo/index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../{fail.md => fail/index.md} | 0 .../{get-conda.md => get-conda/index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 docs/GUI/{gui.md => gui/index.md} | 0 .../index.md} | 0 .../{get-ck.md => get-ck/index.md} | 0 
.../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../{detect-cpu.md => detect-cpu/index.md} | 0 .../{detect-os.md => detect-os/index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../{get-python3.md => get-python3/index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../{run-python.md => run-python/index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../index.md} | 0 .../{get-zephyr.md => get-zephyr/index.md} | 0 mkdocs.yml | 510 +++++++++--------- mkdocsHelper.py | 4 +- 250 files changed, 257 insertions(+), 257 deletions(-) rename docs/AI-ML-datasets/{get-croissant.md => 
get-croissant/index.md} (100%) rename docs/AI-ML-datasets/{get-dataset-cifar10.md => get-dataset-cifar10/index.md} (100%) rename docs/AI-ML-datasets/{get-dataset-cnndm.md => get-dataset-cnndm/index.md} (100%) rename docs/AI-ML-datasets/{get-dataset-coco.md => get-dataset-coco/index.md} (100%) rename docs/AI-ML-datasets/{get-dataset-coco2014.md => get-dataset-coco2014/index.md} (100%) rename docs/AI-ML-datasets/{get-dataset-criteo.md => get-dataset-criteo/index.md} (100%) rename docs/AI-ML-datasets/{get-dataset-imagenet-aux.md => get-dataset-imagenet-aux/index.md} (100%) rename docs/AI-ML-datasets/{get-dataset-imagenet-calibration.md => get-dataset-imagenet-calibration/index.md} (100%) rename docs/AI-ML-datasets/{get-dataset-imagenet-helper.md => get-dataset-imagenet-helper/index.md} (100%) rename docs/AI-ML-datasets/{get-dataset-imagenet-train.md => get-dataset-imagenet-train/index.md} (100%) rename docs/AI-ML-datasets/{get-dataset-imagenet-val.md => get-dataset-imagenet-val/index.md} (100%) rename docs/AI-ML-datasets/{get-dataset-kits19.md => get-dataset-kits19/index.md} (100%) rename docs/AI-ML-datasets/{get-dataset-librispeech.md => get-dataset-librispeech/index.md} (100%) rename docs/AI-ML-datasets/{get-dataset-openimages-annotations.md => get-dataset-openimages-annotations/index.md} (100%) rename docs/AI-ML-datasets/{get-dataset-openimages-calibration.md => get-dataset-openimages-calibration/index.md} (100%) rename docs/AI-ML-datasets/{get-dataset-openimages.md => get-dataset-openimages/index.md} (100%) rename docs/AI-ML-datasets/{get-dataset-openorca.md => get-dataset-openorca/index.md} (100%) rename docs/AI-ML-datasets/{get-dataset-squad-vocab.md => get-dataset-squad-vocab/index.md} (100%) rename docs/AI-ML-datasets/{get-dataset-squad.md => get-dataset-squad/index.md} (100%) rename docs/AI-ML-datasets/{get-preprocessed-dataset-criteo.md => get-preprocessed-dataset-criteo/index.md} (100%) rename docs/AI-ML-datasets/{get-preprocessed-dataset-generic.md => 
get-preprocessed-dataset-generic/index.md} (100%) rename docs/AI-ML-datasets/{get-preprocessed-dataset-imagenet.md => get-preprocessed-dataset-imagenet/index.md} (100%) rename docs/AI-ML-datasets/{get-preprocessed-dataset-kits19.md => get-preprocessed-dataset-kits19/index.md} (100%) rename docs/AI-ML-datasets/{get-preprocessed-dataset-librispeech.md => get-preprocessed-dataset-librispeech/index.md} (100%) rename docs/AI-ML-datasets/{get-preprocessed-dataset-openimages.md => get-preprocessed-dataset-openimages/index.md} (100%) rename docs/AI-ML-datasets/{get-preprocessed-dataset-openorca.md => get-preprocessed-dataset-openorca/index.md} (100%) rename docs/AI-ML-datasets/{get-preprocessed-dataset-squad.md => get-preprocessed-dataset-squad/index.md} (100%) rename docs/AI-ML-frameworks/{get-google-saxml.md => get-google-saxml/index.md} (100%) rename docs/AI-ML-frameworks/{get-onnxruntime-prebuilt.md => get-onnxruntime-prebuilt/index.md} (100%) rename docs/AI-ML-frameworks/{get-qaic-apps-sdk.md => get-qaic-apps-sdk/index.md} (100%) rename docs/AI-ML-frameworks/{get-qaic-platform-sdk.md => get-qaic-platform-sdk/index.md} (100%) rename docs/AI-ML-frameworks/{get-qaic-software-kit.md => get-qaic-software-kit/index.md} (100%) rename docs/AI-ML-frameworks/{get-rocm.md => get-rocm/index.md} (100%) rename docs/AI-ML-frameworks/{get-tvm.md => get-tvm/index.md} (100%) rename docs/AI-ML-frameworks/{install-qaic-compute-sdk-from-src.md => install-qaic-compute-sdk-from-src/index.md} (100%) rename docs/AI-ML-frameworks/{install-rocm.md => install-rocm/index.md} (100%) rename docs/AI-ML-frameworks/{install-tensorflow-for-c.md => install-tensorflow-for-c/index.md} (100%) rename docs/AI-ML-frameworks/{install-tensorflow-from-src.md => install-tensorflow-from-src/index.md} (100%) rename docs/AI-ML-frameworks/{install-tflite-from-src.md => install-tflite-from-src/index.md} (100%) rename docs/AI-ML-models/{convert-ml-model-huggingface-to-onnx.md => 
convert-ml-model-huggingface-to-onnx/index.md} (100%) rename docs/AI-ML-models/{get-bert-squad-vocab.md => get-bert-squad-vocab/index.md} (100%) rename docs/AI-ML-models/{get-dlrm.md => get-dlrm/index.md} (100%) rename docs/AI-ML-models/{get-ml-model-3d-unet-kits19.md => get-ml-model-3d-unet-kits19/index.md} (100%) rename docs/AI-ML-models/{get-ml-model-bert-base-squad.md => get-ml-model-bert-base-squad/index.md} (100%) rename docs/AI-ML-models/{get-ml-model-bert-large-squad.md => get-ml-model-bert-large-squad/index.md} (100%) rename docs/AI-ML-models/{get-ml-model-dlrm-terabyte.md => get-ml-model-dlrm-terabyte/index.md} (100%) rename docs/AI-ML-models/{get-ml-model-efficientnet-lite.md => get-ml-model-efficientnet-lite/index.md} (100%) rename docs/AI-ML-models/{get-ml-model-gptj.md => get-ml-model-gptj/index.md} (100%) rename docs/AI-ML-models/{get-ml-model-huggingface-zoo.md => get-ml-model-huggingface-zoo/index.md} (100%) rename docs/AI-ML-models/{get-ml-model-llama2.md => get-ml-model-llama2/index.md} (100%) rename docs/AI-ML-models/{get-ml-model-mobilenet.md => get-ml-model-mobilenet/index.md} (100%) rename docs/AI-ML-models/{get-ml-model-neuralmagic-zoo.md => get-ml-model-neuralmagic-zoo/index.md} (100%) rename docs/AI-ML-models/{get-ml-model-resnet50.md => get-ml-model-resnet50/index.md} (100%) rename docs/AI-ML-models/{get-ml-model-retinanet-nvidia.md => get-ml-model-retinanet-nvidia/index.md} (100%) rename docs/AI-ML-models/{get-ml-model-retinanet.md => get-ml-model-retinanet/index.md} (100%) rename docs/AI-ML-models/{get-ml-model-rnnt.md => get-ml-model-rnnt/index.md} (100%) rename docs/AI-ML-models/{get-ml-model-stable-diffusion.md => get-ml-model-stable-diffusion/index.md} (100%) rename docs/AI-ML-models/{get-ml-model-tiny-resnet.md => get-ml-model-tiny-resnet/index.md} (100%) rename docs/AI-ML-models/{get-ml-model-using-imagenet-from-model-zoo.md => get-ml-model-using-imagenet-from-model-zoo/index.md} (100%) rename docs/AI-ML-models/{get-tvm-model.md 
=> get-tvm-model/index.md} (100%) rename docs/AI-ML-optimization/{calibrate-model-for.qaic.md => calibrate-model-for.qaic/index.md} (100%) rename docs/AI-ML-optimization/{compile-model-for.qaic.md => compile-model-for.qaic/index.md} (100%) rename docs/AI-ML-optimization/{prune-bert-models.md => prune-bert-models/index.md} (100%) rename docs/CM-interface-prototyping/{test-mlperf-inference-retinanet.md => test-mlperf-inference-retinanet/index.md} (100%) rename docs/CUDA-automation/{get-cuda-devices.md => get-cuda-devices/index.md} (100%) rename docs/CUDA-automation/{get-cuda.md => get-cuda/index.md} (100%) rename docs/CUDA-automation/{get-cudnn.md => get-cudnn/index.md} (100%) rename docs/CUDA-automation/{get-tensorrt.md => get-tensorrt/index.md} (100%) rename docs/CUDA-automation/{install-cuda-package-manager.md => install-cuda-package-manager/index.md} (100%) rename docs/CUDA-automation/{install-cuda-prebuilt.md => install-cuda-prebuilt/index.md} (100%) rename docs/Cloud-automation/{destroy-terraform.md => destroy-terraform/index.md} (100%) rename docs/Cloud-automation/{get-aws-cli.md => get-aws-cli/index.md} (100%) rename docs/Cloud-automation/{get-terraform.md => get-terraform/index.md} (100%) rename docs/Cloud-automation/{install-aws-cli.md => install-aws-cli/index.md} (100%) rename docs/Cloud-automation/{install-terraform-from-src.md => install-terraform-from-src/index.md} (100%) rename docs/Cloud-automation/{run-terraform.md => run-terraform/index.md} (100%) rename docs/Collective-benchmarking/{launch-benchmark.md => launch-benchmark/index.md} (100%) rename docs/Compiler-automation/{get-aocl.md => get-aocl/index.md} (100%) rename docs/Compiler-automation/{get-cl.md => get-cl/index.md} (100%) rename docs/Compiler-automation/{get-compiler-flags.md => get-compiler-flags/index.md} (100%) rename docs/Compiler-automation/{get-compiler-rust.md => get-compiler-rust/index.md} (100%) rename docs/Compiler-automation/{get-gcc.md => get-gcc/index.md} (100%) rename 
docs/Compiler-automation/{get-go.md => get-go/index.md} (100%) rename docs/Compiler-automation/{get-llvm.md => get-llvm/index.md} (100%) rename docs/Compiler-automation/{install-gcc-src.md => install-gcc-src/index.md} (100%) rename docs/Compiler-automation/{install-ipex-from-src.md => install-ipex-from-src/index.md} (100%) rename docs/Compiler-automation/{install-llvm-prebuilt.md => install-llvm-prebuilt/index.md} (100%) rename docs/Compiler-automation/{install-llvm-src.md => install-llvm-src/index.md} (100%) rename docs/Compiler-automation/{install-onednn-from-src.md => install-onednn-from-src/index.md} (100%) rename docs/Compiler-automation/{install-onnxruntime-from-src.md => install-onnxruntime-from-src/index.md} (100%) rename docs/Compiler-automation/{install-pytorch-from-src.md => install-pytorch-from-src/index.md} (100%) rename docs/Compiler-automation/{install-pytorch-kineto-from-src.md => install-pytorch-kineto-from-src/index.md} (100%) rename docs/Compiler-automation/{install-torchvision-from-src.md => install-torchvision-from-src/index.md} (100%) rename docs/Compiler-automation/{install-tpp-pytorch-extension.md => install-tpp-pytorch-extension/index.md} (100%) rename docs/Compiler-automation/{install-transformers-from-src.md => install-transformers-from-src/index.md} (100%) rename docs/Dashboard-automation/{publish-results-to-dashboard.md => publish-results-to-dashboard/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{get-android-sdk.md => get-android-sdk/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{get-aria2.md => get-aria2/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{get-bazel.md => get-bazel/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{get-blis.md => get-blis/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{get-brew.md => get-brew/index.md} (100%) rename 
docs/Detection-or-installation-of-tools-and-artifacts/{get-cmake.md => get-cmake/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{get-cmsis_5.md => get-cmsis_5/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{get-docker.md => get-docker/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{get-generic-sys-util.md => get-generic-sys-util/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{get-google-test.md => get-google-test/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{get-java.md => get-java/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{get-javac.md => get-javac/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{get-lib-armnn.md => get-lib-armnn/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{get-lib-dnnl.md => get-lib-dnnl/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{get-lib-protobuf.md => get-lib-protobuf/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{get-lib-qaic-api.md => get-lib-qaic-api/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{get-nvidia-docker.md => get-nvidia-docker/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{get-openssl.md => get-openssl/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{get-rclone.md => get-rclone/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{get-sys-utils-cm.md => get-sys-utils-cm/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{get-sys-utils-min.md => get-sys-utils-min/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{get-xilinx-sdk.md => get-xilinx-sdk/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{get-zendnn.md => 
get-zendnn/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{install-bazel.md => install-bazel/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{install-cmake-prebuilt.md => install-cmake-prebuilt/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{install-gflags.md => install-gflags/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{install-github-cli.md => install-github-cli/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{install-numactl-from-src.md => install-numactl-from-src/index.md} (100%) rename docs/Detection-or-installation-of-tools-and-artifacts/{install-openssl.md => install-openssl/index.md} (100%) rename docs/DevOps-automation/{benchmark-program.md => benchmark-program/index.md} (100%) rename docs/DevOps-automation/{compile-program.md => compile-program/index.md} (100%) rename docs/DevOps-automation/{convert-csv-to-md.md => convert-csv-to-md/index.md} (100%) rename docs/DevOps-automation/{copy-to-clipboard.md => copy-to-clipboard/index.md} (100%) rename docs/DevOps-automation/{create-conda-env.md => create-conda-env/index.md} (100%) rename docs/DevOps-automation/{create-patch.md => create-patch/index.md} (100%) rename docs/DevOps-automation/{detect-sudo.md => detect-sudo/index.md} (100%) rename docs/DevOps-automation/{download-and-extract.md => download-and-extract/index.md} (100%) rename docs/DevOps-automation/{download-file.md => download-file/index.md} (100%) rename docs/DevOps-automation/{download-torrent.md => download-torrent/index.md} (100%) rename docs/DevOps-automation/{extract-file.md => extract-file/index.md} (100%) rename docs/DevOps-automation/{fail.md => fail/index.md} (100%) rename docs/DevOps-automation/{get-conda.md => get-conda/index.md} (100%) rename docs/DevOps-automation/{get-git-repo.md => get-git-repo/index.md} (100%) rename docs/DevOps-automation/{get-github-cli.md => 
get-github-cli/index.md} (100%) rename docs/DevOps-automation/{pull-git-repo.md => pull-git-repo/index.md} (100%) rename docs/DevOps-automation/{push-csv-to-spreadsheet.md => push-csv-to-spreadsheet/index.md} (100%) rename docs/DevOps-automation/{set-device-settings-qaic.md => set-device-settings-qaic/index.md} (100%) rename docs/DevOps-automation/{set-echo-off-win.md => set-echo-off-win/index.md} (100%) rename docs/DevOps-automation/{set-performance-mode.md => set-performance-mode/index.md} (100%) rename docs/DevOps-automation/{set-sqlite-dir.md => set-sqlite-dir/index.md} (100%) rename docs/DevOps-automation/{tar-my-folder.md => tar-my-folder/index.md} (100%) rename docs/Docker-automation/{build-docker-image.md => build-docker-image/index.md} (100%) rename docs/Docker-automation/{build-dockerfile.md => build-dockerfile/index.md} (100%) rename docs/Docker-automation/{prune-docker.md => prune-docker/index.md} (100%) rename docs/Docker-automation/{run-docker-container.md => run-docker-container/index.md} (100%) rename docs/GUI/{gui.md => gui/index.md} (100%) rename docs/Legacy-CK-support/{get-ck-repo-mlops.md => get-ck-repo-mlops/index.md} (100%) rename docs/Legacy-CK-support/{get-ck.md => get-ck/index.md} (100%) rename docs/MLPerf-benchmark-support/{add-custom-nvidia-system.md => add-custom-nvidia-system/index.md} (100%) rename docs/MLPerf-benchmark-support/{benchmark-any-mlperf-inference-implementation.md => benchmark-any-mlperf-inference-implementation/index.md} (100%) rename docs/MLPerf-benchmark-support/{build-mlperf-inference-server-nvidia.md => build-mlperf-inference-server-nvidia/index.md} (100%) rename docs/MLPerf-benchmark-support/{generate-mlperf-inference-submission.md => generate-mlperf-inference-submission/index.md} (100%) rename docs/MLPerf-benchmark-support/{generate-mlperf-inference-user-conf.md => generate-mlperf-inference-user-conf/index.md} (100%) rename docs/MLPerf-benchmark-support/{generate-mlperf-tiny-report.md => 
generate-mlperf-tiny-report/index.md} (100%) rename docs/MLPerf-benchmark-support/{generate-mlperf-tiny-submission.md => generate-mlperf-tiny-submission/index.md} (100%) rename docs/MLPerf-benchmark-support/{generate-nvidia-engine.md => generate-nvidia-engine/index.md} (100%) rename docs/MLPerf-benchmark-support/{get-mlperf-inference-intel-scratch-space.md => get-mlperf-inference-intel-scratch-space/index.md} (100%) rename docs/MLPerf-benchmark-support/{get-mlperf-inference-loadgen.md => get-mlperf-inference-loadgen/index.md} (100%) rename docs/MLPerf-benchmark-support/{get-mlperf-inference-nvidia-common-code.md => get-mlperf-inference-nvidia-common-code/index.md} (100%) rename docs/MLPerf-benchmark-support/{get-mlperf-inference-nvidia-scratch-space.md => get-mlperf-inference-nvidia-scratch-space/index.md} (100%) rename docs/MLPerf-benchmark-support/{get-mlperf-inference-results-dir.md => get-mlperf-inference-results-dir/index.md} (100%) rename docs/MLPerf-benchmark-support/{get-mlperf-inference-results.md => get-mlperf-inference-results/index.md} (100%) rename docs/MLPerf-benchmark-support/{get-mlperf-inference-src.md => get-mlperf-inference-src/index.md} (100%) rename docs/MLPerf-benchmark-support/{get-mlperf-inference-submission-dir.md => get-mlperf-inference-submission-dir/index.md} (100%) rename docs/MLPerf-benchmark-support/{get-mlperf-inference-sut-configs.md => get-mlperf-inference-sut-configs/index.md} (100%) rename docs/MLPerf-benchmark-support/{get-mlperf-inference-sut-description.md => get-mlperf-inference-sut-description/index.md} (100%) rename docs/MLPerf-benchmark-support/{get-mlperf-logging.md => get-mlperf-logging/index.md} (100%) rename docs/MLPerf-benchmark-support/{get-mlperf-power-dev.md => get-mlperf-power-dev/index.md} (100%) rename docs/MLPerf-benchmark-support/{get-mlperf-tiny-eembc-energy-runner-src.md => get-mlperf-tiny-eembc-energy-runner-src/index.md} (100%) rename docs/MLPerf-benchmark-support/{get-mlperf-tiny-src.md => 
get-mlperf-tiny-src/index.md} (100%) rename docs/MLPerf-benchmark-support/{get-mlperf-training-nvidia-code.md => get-mlperf-training-nvidia-code/index.md} (100%) rename docs/MLPerf-benchmark-support/{get-mlperf-training-src.md => get-mlperf-training-src/index.md} (100%) rename docs/MLPerf-benchmark-support/{get-nvidia-mitten.md => get-nvidia-mitten/index.md} (100%) rename docs/MLPerf-benchmark-support/{get-spec-ptd.md => get-spec-ptd/index.md} (100%) rename docs/MLPerf-benchmark-support/{import-mlperf-inference-to-experiment.md => import-mlperf-inference-to-experiment/index.md} (100%) rename docs/MLPerf-benchmark-support/{import-mlperf-tiny-to-experiment.md => import-mlperf-tiny-to-experiment/index.md} (100%) rename docs/MLPerf-benchmark-support/{import-mlperf-training-to-experiment.md => import-mlperf-training-to-experiment/index.md} (100%) rename docs/MLPerf-benchmark-support/{install-mlperf-logging-from-src.md => install-mlperf-logging-from-src/index.md} (100%) rename docs/MLPerf-benchmark-support/{prepare-training-data-bert.md => prepare-training-data-bert/index.md} (100%) rename docs/MLPerf-benchmark-support/{prepare-training-data-resnet.md => prepare-training-data-resnet/index.md} (100%) rename docs/MLPerf-benchmark-support/{preprocess-mlperf-inference-submission.md => preprocess-mlperf-inference-submission/index.md} (100%) rename docs/MLPerf-benchmark-support/{process-mlperf-accuracy.md => process-mlperf-accuracy/index.md} (100%) rename docs/MLPerf-benchmark-support/{push-mlperf-inference-results-to-github.md => push-mlperf-inference-results-to-github/index.md} (100%) rename docs/MLPerf-benchmark-support/{run-all-mlperf-models.md => run-all-mlperf-models/index.md} (100%) rename docs/MLPerf-benchmark-support/{run-mlperf-inference-mobilenet-models.md => run-mlperf-inference-mobilenet-models/index.md} (100%) rename docs/MLPerf-benchmark-support/{run-mlperf-inference-submission-checker.md => run-mlperf-inference-submission-checker/index.md} (100%) rename 
docs/MLPerf-benchmark-support/{run-mlperf-power-client.md => run-mlperf-power-client/index.md} (100%) rename docs/MLPerf-benchmark-support/{run-mlperf-power-server.md => run-mlperf-power-server/index.md} (100%) rename docs/MLPerf-benchmark-support/{run-mlperf-training-submission-checker.md => run-mlperf-training-submission-checker/index.md} (100%) rename docs/MLPerf-benchmark-support/{truncate-mlperf-inference-accuracy-log.md => truncate-mlperf-inference-accuracy-log/index.md} (100%) rename docs/Modular-AI-ML-application-pipeline/{app-image-classification-onnx-py.md => app-image-classification-onnx-py/index.md} (100%) rename docs/Modular-AI-ML-application-pipeline/{app-image-classification-tf-onnx-cpp.md => app-image-classification-tf-onnx-cpp/index.md} (100%) rename docs/Modular-AI-ML-application-pipeline/{app-image-classification-torch-py.md => app-image-classification-torch-py/index.md} (100%) rename docs/Modular-AI-ML-application-pipeline/{app-image-classification-tvm-onnx-py.md => app-image-classification-tvm-onnx-py/index.md} (100%) rename docs/Modular-AI-ML-application-pipeline/{app-stable-diffusion-onnx-py.md => app-stable-diffusion-onnx-py/index.md} (100%) rename docs/Modular-MLPerf-benchmarks/{app-mlperf-inference-dummy.md => app-mlperf-inference-dummy/index.md} (100%) rename docs/Modular-MLPerf-benchmarks/{app-mlperf-inference-intel.md => app-mlperf-inference-intel/index.md} (100%) rename docs/Modular-MLPerf-benchmarks/{app-mlperf-inference-qualcomm.md => app-mlperf-inference-qualcomm/index.md} (100%) rename docs/Modular-MLPerf-inference-benchmark-pipeline/{app-loadgen-generic-python.md => app-loadgen-generic-python/index.md} (100%) rename docs/Modular-MLPerf-inference-benchmark-pipeline/{app-mlperf-inference-ctuning-cpp-tflite.md => app-mlperf-inference-ctuning-cpp-tflite/index.md} (100%) rename docs/Modular-MLPerf-inference-benchmark-pipeline/{app-mlperf-inference-mlcommons-cpp.md => app-mlperf-inference-mlcommons-cpp/index.md} (100%) rename 
docs/Modular-MLPerf-inference-benchmark-pipeline/{app-mlperf-inference-mlcommons-python.md => app-mlperf-inference-mlcommons-python/index.md} (100%) rename docs/Modular-MLPerf-inference-benchmark-pipeline/{app-mlperf-inference.md => app-mlperf-inference/index.md} (100%) rename docs/Modular-MLPerf-inference-benchmark-pipeline/{benchmark-program-mlperf.md => benchmark-program-mlperf/index.md} (100%) rename docs/Modular-MLPerf-inference-benchmark-pipeline/{run-mlperf-inference-app.md => run-mlperf-inference-app/index.md} (100%) rename docs/Modular-MLPerf-training-benchmark-pipeline/{app-mlperf-training-nvidia.md => app-mlperf-training-nvidia/index.md} (100%) rename docs/Modular-MLPerf-training-benchmark-pipeline/{app-mlperf-training-reference.md => app-mlperf-training-reference/index.md} (100%) rename docs/Modular-application-pipeline/{app-image-corner-detection.md => app-image-corner-detection/index.md} (100%) rename docs/Platform-information/{detect-cpu.md => detect-cpu/index.md} (100%) rename docs/Platform-information/{detect-os.md => detect-os/index.md} (100%) rename docs/Python-automation/{activate-python-venv.md => activate-python-venv/index.md} (100%) rename docs/Python-automation/{get-generic-python-lib.md => get-generic-python-lib/index.md} (100%) rename docs/Python-automation/{get-python3.md => get-python3/index.md} (100%) rename docs/Python-automation/{install-generic-conda-package.md => install-generic-conda-package/index.md} (100%) rename docs/Python-automation/{install-python-src.md => install-python-src/index.md} (100%) rename docs/Python-automation/{install-python-venv.md => install-python-venv/index.md} (100%) rename docs/Remote-automation/{remote-run-commands.md => remote-run-commands/index.md} (100%) rename docs/Reproduce-MLPerf-benchmarks/{app-mlperf-inference-nvidia.md => app-mlperf-inference-nvidia/index.md} (100%) rename docs/Reproduce-MLPerf-benchmarks/{reproduce-mlperf-octoml-tinyml-results.md => 
reproduce-mlperf-octoml-tinyml-results/index.md} (100%) rename docs/Reproduce-MLPerf-benchmarks/{reproduce-mlperf-training-nvidia.md => reproduce-mlperf-training-nvidia/index.md} (100%) rename docs/Reproduce-MLPerf-benchmarks/{wrapper-reproduce-octoml-tinyml-submission.md => wrapper-reproduce-octoml-tinyml-submission/index.md} (100%) rename docs/Reproducibility-and-artifact-evaluation/{get-ipol-src.md => get-ipol-src/index.md} (100%) rename docs/Reproducibility-and-artifact-evaluation/{process-ae-users.md => process-ae-users/index.md} (100%) rename docs/Reproducibility-and-artifact-evaluation/{reproduce-ipol-paper-2022-439.md => reproduce-ipol-paper-2022-439/index.md} (100%) rename docs/Reproducibility-and-artifact-evaluation/{reproduce-micro-paper-2023-victima.md => reproduce-micro-paper-2023-victima/index.md} (100%) rename docs/Tests/{print-croissant-desc.md => print-croissant-desc/index.md} (100%) rename docs/Tests/{print-hello-world-java.md => print-hello-world-java/index.md} (100%) rename docs/Tests/{print-hello-world-javac.md => print-hello-world-javac/index.md} (100%) rename docs/Tests/{print-hello-world-py.md => print-hello-world-py/index.md} (100%) rename docs/Tests/{print-hello-world.md => print-hello-world/index.md} (100%) rename docs/Tests/{print-python-version.md => print-python-version/index.md} (100%) rename docs/Tests/{run-python.md => run-python/index.md} (100%) rename docs/Tests/{test-deps-conditions.md => test-deps-conditions/index.md} (100%) rename docs/Tests/{test-download-and-extract-artifacts.md => test-download-and-extract-artifacts/index.md} (100%) rename docs/Tests/{test-set-sys-user-cm.md => test-set-sys-user-cm/index.md} (100%) rename docs/Tests/{upgrade-python-pip.md => upgrade-python-pip/index.md} (100%) rename docs/TinyML-automation/{create-fpgaconvnet-app-tinyml.md => create-fpgaconvnet-app-tinyml/index.md} (100%) rename docs/TinyML-automation/{create-fpgaconvnet-config-tinyml.md => create-fpgaconvnet-config-tinyml/index.md} (100%) 
rename docs/TinyML-automation/{flash-tinyml-binary.md => flash-tinyml-binary/index.md} (100%) rename docs/TinyML-automation/{get-microtvm.md => get-microtvm/index.md} (100%) rename docs/TinyML-automation/{get-zephyr-sdk.md => get-zephyr-sdk/index.md} (100%) rename docs/TinyML-automation/{get-zephyr.md => get-zephyr/index.md} (100%) diff --git a/docs/AI-ML-datasets/get-croissant.md b/docs/AI-ML-datasets/get-croissant/index.md similarity index 100% rename from docs/AI-ML-datasets/get-croissant.md rename to docs/AI-ML-datasets/get-croissant/index.md diff --git a/docs/AI-ML-datasets/get-dataset-cifar10.md b/docs/AI-ML-datasets/get-dataset-cifar10/index.md similarity index 100% rename from docs/AI-ML-datasets/get-dataset-cifar10.md rename to docs/AI-ML-datasets/get-dataset-cifar10/index.md diff --git a/docs/AI-ML-datasets/get-dataset-cnndm.md b/docs/AI-ML-datasets/get-dataset-cnndm/index.md similarity index 100% rename from docs/AI-ML-datasets/get-dataset-cnndm.md rename to docs/AI-ML-datasets/get-dataset-cnndm/index.md diff --git a/docs/AI-ML-datasets/get-dataset-coco.md b/docs/AI-ML-datasets/get-dataset-coco/index.md similarity index 100% rename from docs/AI-ML-datasets/get-dataset-coco.md rename to docs/AI-ML-datasets/get-dataset-coco/index.md diff --git a/docs/AI-ML-datasets/get-dataset-coco2014.md b/docs/AI-ML-datasets/get-dataset-coco2014/index.md similarity index 100% rename from docs/AI-ML-datasets/get-dataset-coco2014.md rename to docs/AI-ML-datasets/get-dataset-coco2014/index.md diff --git a/docs/AI-ML-datasets/get-dataset-criteo.md b/docs/AI-ML-datasets/get-dataset-criteo/index.md similarity index 100% rename from docs/AI-ML-datasets/get-dataset-criteo.md rename to docs/AI-ML-datasets/get-dataset-criteo/index.md diff --git a/docs/AI-ML-datasets/get-dataset-imagenet-aux.md b/docs/AI-ML-datasets/get-dataset-imagenet-aux/index.md similarity index 100% rename from docs/AI-ML-datasets/get-dataset-imagenet-aux.md rename to 
docs/AI-ML-datasets/get-dataset-imagenet-aux/index.md diff --git a/docs/AI-ML-datasets/get-dataset-imagenet-calibration.md b/docs/AI-ML-datasets/get-dataset-imagenet-calibration/index.md similarity index 100% rename from docs/AI-ML-datasets/get-dataset-imagenet-calibration.md rename to docs/AI-ML-datasets/get-dataset-imagenet-calibration/index.md diff --git a/docs/AI-ML-datasets/get-dataset-imagenet-helper.md b/docs/AI-ML-datasets/get-dataset-imagenet-helper/index.md similarity index 100% rename from docs/AI-ML-datasets/get-dataset-imagenet-helper.md rename to docs/AI-ML-datasets/get-dataset-imagenet-helper/index.md diff --git a/docs/AI-ML-datasets/get-dataset-imagenet-train.md b/docs/AI-ML-datasets/get-dataset-imagenet-train/index.md similarity index 100% rename from docs/AI-ML-datasets/get-dataset-imagenet-train.md rename to docs/AI-ML-datasets/get-dataset-imagenet-train/index.md diff --git a/docs/AI-ML-datasets/get-dataset-imagenet-val.md b/docs/AI-ML-datasets/get-dataset-imagenet-val/index.md similarity index 100% rename from docs/AI-ML-datasets/get-dataset-imagenet-val.md rename to docs/AI-ML-datasets/get-dataset-imagenet-val/index.md diff --git a/docs/AI-ML-datasets/get-dataset-kits19.md b/docs/AI-ML-datasets/get-dataset-kits19/index.md similarity index 100% rename from docs/AI-ML-datasets/get-dataset-kits19.md rename to docs/AI-ML-datasets/get-dataset-kits19/index.md diff --git a/docs/AI-ML-datasets/get-dataset-librispeech.md b/docs/AI-ML-datasets/get-dataset-librispeech/index.md similarity index 100% rename from docs/AI-ML-datasets/get-dataset-librispeech.md rename to docs/AI-ML-datasets/get-dataset-librispeech/index.md diff --git a/docs/AI-ML-datasets/get-dataset-openimages-annotations.md b/docs/AI-ML-datasets/get-dataset-openimages-annotations/index.md similarity index 100% rename from docs/AI-ML-datasets/get-dataset-openimages-annotations.md rename to docs/AI-ML-datasets/get-dataset-openimages-annotations/index.md diff --git 
a/docs/AI-ML-datasets/get-dataset-openimages-calibration.md b/docs/AI-ML-datasets/get-dataset-openimages-calibration/index.md similarity index 100% rename from docs/AI-ML-datasets/get-dataset-openimages-calibration.md rename to docs/AI-ML-datasets/get-dataset-openimages-calibration/index.md diff --git a/docs/AI-ML-datasets/get-dataset-openimages.md b/docs/AI-ML-datasets/get-dataset-openimages/index.md similarity index 100% rename from docs/AI-ML-datasets/get-dataset-openimages.md rename to docs/AI-ML-datasets/get-dataset-openimages/index.md diff --git a/docs/AI-ML-datasets/get-dataset-openorca.md b/docs/AI-ML-datasets/get-dataset-openorca/index.md similarity index 100% rename from docs/AI-ML-datasets/get-dataset-openorca.md rename to docs/AI-ML-datasets/get-dataset-openorca/index.md diff --git a/docs/AI-ML-datasets/get-dataset-squad-vocab.md b/docs/AI-ML-datasets/get-dataset-squad-vocab/index.md similarity index 100% rename from docs/AI-ML-datasets/get-dataset-squad-vocab.md rename to docs/AI-ML-datasets/get-dataset-squad-vocab/index.md diff --git a/docs/AI-ML-datasets/get-dataset-squad.md b/docs/AI-ML-datasets/get-dataset-squad/index.md similarity index 100% rename from docs/AI-ML-datasets/get-dataset-squad.md rename to docs/AI-ML-datasets/get-dataset-squad/index.md diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-criteo.md b/docs/AI-ML-datasets/get-preprocessed-dataset-criteo/index.md similarity index 100% rename from docs/AI-ML-datasets/get-preprocessed-dataset-criteo.md rename to docs/AI-ML-datasets/get-preprocessed-dataset-criteo/index.md diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-generic.md b/docs/AI-ML-datasets/get-preprocessed-dataset-generic/index.md similarity index 100% rename from docs/AI-ML-datasets/get-preprocessed-dataset-generic.md rename to docs/AI-ML-datasets/get-preprocessed-dataset-generic/index.md diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-imagenet.md 
b/docs/AI-ML-datasets/get-preprocessed-dataset-imagenet/index.md similarity index 100% rename from docs/AI-ML-datasets/get-preprocessed-dataset-imagenet.md rename to docs/AI-ML-datasets/get-preprocessed-dataset-imagenet/index.md diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-kits19.md b/docs/AI-ML-datasets/get-preprocessed-dataset-kits19/index.md similarity index 100% rename from docs/AI-ML-datasets/get-preprocessed-dataset-kits19.md rename to docs/AI-ML-datasets/get-preprocessed-dataset-kits19/index.md diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-librispeech.md b/docs/AI-ML-datasets/get-preprocessed-dataset-librispeech/index.md similarity index 100% rename from docs/AI-ML-datasets/get-preprocessed-dataset-librispeech.md rename to docs/AI-ML-datasets/get-preprocessed-dataset-librispeech/index.md diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-openimages.md b/docs/AI-ML-datasets/get-preprocessed-dataset-openimages/index.md similarity index 100% rename from docs/AI-ML-datasets/get-preprocessed-dataset-openimages.md rename to docs/AI-ML-datasets/get-preprocessed-dataset-openimages/index.md diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-openorca.md b/docs/AI-ML-datasets/get-preprocessed-dataset-openorca/index.md similarity index 100% rename from docs/AI-ML-datasets/get-preprocessed-dataset-openorca.md rename to docs/AI-ML-datasets/get-preprocessed-dataset-openorca/index.md diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-squad.md b/docs/AI-ML-datasets/get-preprocessed-dataset-squad/index.md similarity index 100% rename from docs/AI-ML-datasets/get-preprocessed-dataset-squad.md rename to docs/AI-ML-datasets/get-preprocessed-dataset-squad/index.md diff --git a/docs/AI-ML-frameworks/get-google-saxml.md b/docs/AI-ML-frameworks/get-google-saxml/index.md similarity index 100% rename from docs/AI-ML-frameworks/get-google-saxml.md rename to docs/AI-ML-frameworks/get-google-saxml/index.md diff --git 
a/docs/AI-ML-frameworks/get-onnxruntime-prebuilt.md b/docs/AI-ML-frameworks/get-onnxruntime-prebuilt/index.md similarity index 100% rename from docs/AI-ML-frameworks/get-onnxruntime-prebuilt.md rename to docs/AI-ML-frameworks/get-onnxruntime-prebuilt/index.md diff --git a/docs/AI-ML-frameworks/get-qaic-apps-sdk.md b/docs/AI-ML-frameworks/get-qaic-apps-sdk/index.md similarity index 100% rename from docs/AI-ML-frameworks/get-qaic-apps-sdk.md rename to docs/AI-ML-frameworks/get-qaic-apps-sdk/index.md diff --git a/docs/AI-ML-frameworks/get-qaic-platform-sdk.md b/docs/AI-ML-frameworks/get-qaic-platform-sdk/index.md similarity index 100% rename from docs/AI-ML-frameworks/get-qaic-platform-sdk.md rename to docs/AI-ML-frameworks/get-qaic-platform-sdk/index.md diff --git a/docs/AI-ML-frameworks/get-qaic-software-kit.md b/docs/AI-ML-frameworks/get-qaic-software-kit/index.md similarity index 100% rename from docs/AI-ML-frameworks/get-qaic-software-kit.md rename to docs/AI-ML-frameworks/get-qaic-software-kit/index.md diff --git a/docs/AI-ML-frameworks/get-rocm.md b/docs/AI-ML-frameworks/get-rocm/index.md similarity index 100% rename from docs/AI-ML-frameworks/get-rocm.md rename to docs/AI-ML-frameworks/get-rocm/index.md diff --git a/docs/AI-ML-frameworks/get-tvm.md b/docs/AI-ML-frameworks/get-tvm/index.md similarity index 100% rename from docs/AI-ML-frameworks/get-tvm.md rename to docs/AI-ML-frameworks/get-tvm/index.md diff --git a/docs/AI-ML-frameworks/install-qaic-compute-sdk-from-src.md b/docs/AI-ML-frameworks/install-qaic-compute-sdk-from-src/index.md similarity index 100% rename from docs/AI-ML-frameworks/install-qaic-compute-sdk-from-src.md rename to docs/AI-ML-frameworks/install-qaic-compute-sdk-from-src/index.md diff --git a/docs/AI-ML-frameworks/install-rocm.md b/docs/AI-ML-frameworks/install-rocm/index.md similarity index 100% rename from docs/AI-ML-frameworks/install-rocm.md rename to docs/AI-ML-frameworks/install-rocm/index.md diff --git 
a/docs/AI-ML-frameworks/install-tensorflow-for-c.md b/docs/AI-ML-frameworks/install-tensorflow-for-c/index.md similarity index 100% rename from docs/AI-ML-frameworks/install-tensorflow-for-c.md rename to docs/AI-ML-frameworks/install-tensorflow-for-c/index.md diff --git a/docs/AI-ML-frameworks/install-tensorflow-from-src.md b/docs/AI-ML-frameworks/install-tensorflow-from-src/index.md similarity index 100% rename from docs/AI-ML-frameworks/install-tensorflow-from-src.md rename to docs/AI-ML-frameworks/install-tensorflow-from-src/index.md diff --git a/docs/AI-ML-frameworks/install-tflite-from-src.md b/docs/AI-ML-frameworks/install-tflite-from-src/index.md similarity index 100% rename from docs/AI-ML-frameworks/install-tflite-from-src.md rename to docs/AI-ML-frameworks/install-tflite-from-src/index.md diff --git a/docs/AI-ML-models/convert-ml-model-huggingface-to-onnx.md b/docs/AI-ML-models/convert-ml-model-huggingface-to-onnx/index.md similarity index 100% rename from docs/AI-ML-models/convert-ml-model-huggingface-to-onnx.md rename to docs/AI-ML-models/convert-ml-model-huggingface-to-onnx/index.md diff --git a/docs/AI-ML-models/get-bert-squad-vocab.md b/docs/AI-ML-models/get-bert-squad-vocab/index.md similarity index 100% rename from docs/AI-ML-models/get-bert-squad-vocab.md rename to docs/AI-ML-models/get-bert-squad-vocab/index.md diff --git a/docs/AI-ML-models/get-dlrm.md b/docs/AI-ML-models/get-dlrm/index.md similarity index 100% rename from docs/AI-ML-models/get-dlrm.md rename to docs/AI-ML-models/get-dlrm/index.md diff --git a/docs/AI-ML-models/get-ml-model-3d-unet-kits19.md b/docs/AI-ML-models/get-ml-model-3d-unet-kits19/index.md similarity index 100% rename from docs/AI-ML-models/get-ml-model-3d-unet-kits19.md rename to docs/AI-ML-models/get-ml-model-3d-unet-kits19/index.md diff --git a/docs/AI-ML-models/get-ml-model-bert-base-squad.md b/docs/AI-ML-models/get-ml-model-bert-base-squad/index.md similarity index 100% rename from 
docs/AI-ML-models/get-ml-model-bert-base-squad.md rename to docs/AI-ML-models/get-ml-model-bert-base-squad/index.md diff --git a/docs/AI-ML-models/get-ml-model-bert-large-squad.md b/docs/AI-ML-models/get-ml-model-bert-large-squad/index.md similarity index 100% rename from docs/AI-ML-models/get-ml-model-bert-large-squad.md rename to docs/AI-ML-models/get-ml-model-bert-large-squad/index.md diff --git a/docs/AI-ML-models/get-ml-model-dlrm-terabyte.md b/docs/AI-ML-models/get-ml-model-dlrm-terabyte/index.md similarity index 100% rename from docs/AI-ML-models/get-ml-model-dlrm-terabyte.md rename to docs/AI-ML-models/get-ml-model-dlrm-terabyte/index.md diff --git a/docs/AI-ML-models/get-ml-model-efficientnet-lite.md b/docs/AI-ML-models/get-ml-model-efficientnet-lite/index.md similarity index 100% rename from docs/AI-ML-models/get-ml-model-efficientnet-lite.md rename to docs/AI-ML-models/get-ml-model-efficientnet-lite/index.md diff --git a/docs/AI-ML-models/get-ml-model-gptj.md b/docs/AI-ML-models/get-ml-model-gptj/index.md similarity index 100% rename from docs/AI-ML-models/get-ml-model-gptj.md rename to docs/AI-ML-models/get-ml-model-gptj/index.md diff --git a/docs/AI-ML-models/get-ml-model-huggingface-zoo.md b/docs/AI-ML-models/get-ml-model-huggingface-zoo/index.md similarity index 100% rename from docs/AI-ML-models/get-ml-model-huggingface-zoo.md rename to docs/AI-ML-models/get-ml-model-huggingface-zoo/index.md diff --git a/docs/AI-ML-models/get-ml-model-llama2.md b/docs/AI-ML-models/get-ml-model-llama2/index.md similarity index 100% rename from docs/AI-ML-models/get-ml-model-llama2.md rename to docs/AI-ML-models/get-ml-model-llama2/index.md diff --git a/docs/AI-ML-models/get-ml-model-mobilenet.md b/docs/AI-ML-models/get-ml-model-mobilenet/index.md similarity index 100% rename from docs/AI-ML-models/get-ml-model-mobilenet.md rename to docs/AI-ML-models/get-ml-model-mobilenet/index.md diff --git a/docs/AI-ML-models/get-ml-model-neuralmagic-zoo.md 
b/docs/AI-ML-models/get-ml-model-neuralmagic-zoo/index.md similarity index 100% rename from docs/AI-ML-models/get-ml-model-neuralmagic-zoo.md rename to docs/AI-ML-models/get-ml-model-neuralmagic-zoo/index.md diff --git a/docs/AI-ML-models/get-ml-model-resnet50.md b/docs/AI-ML-models/get-ml-model-resnet50/index.md similarity index 100% rename from docs/AI-ML-models/get-ml-model-resnet50.md rename to docs/AI-ML-models/get-ml-model-resnet50/index.md diff --git a/docs/AI-ML-models/get-ml-model-retinanet-nvidia.md b/docs/AI-ML-models/get-ml-model-retinanet-nvidia/index.md similarity index 100% rename from docs/AI-ML-models/get-ml-model-retinanet-nvidia.md rename to docs/AI-ML-models/get-ml-model-retinanet-nvidia/index.md diff --git a/docs/AI-ML-models/get-ml-model-retinanet.md b/docs/AI-ML-models/get-ml-model-retinanet/index.md similarity index 100% rename from docs/AI-ML-models/get-ml-model-retinanet.md rename to docs/AI-ML-models/get-ml-model-retinanet/index.md diff --git a/docs/AI-ML-models/get-ml-model-rnnt.md b/docs/AI-ML-models/get-ml-model-rnnt/index.md similarity index 100% rename from docs/AI-ML-models/get-ml-model-rnnt.md rename to docs/AI-ML-models/get-ml-model-rnnt/index.md diff --git a/docs/AI-ML-models/get-ml-model-stable-diffusion.md b/docs/AI-ML-models/get-ml-model-stable-diffusion/index.md similarity index 100% rename from docs/AI-ML-models/get-ml-model-stable-diffusion.md rename to docs/AI-ML-models/get-ml-model-stable-diffusion/index.md diff --git a/docs/AI-ML-models/get-ml-model-tiny-resnet.md b/docs/AI-ML-models/get-ml-model-tiny-resnet/index.md similarity index 100% rename from docs/AI-ML-models/get-ml-model-tiny-resnet.md rename to docs/AI-ML-models/get-ml-model-tiny-resnet/index.md diff --git a/docs/AI-ML-models/get-ml-model-using-imagenet-from-model-zoo.md b/docs/AI-ML-models/get-ml-model-using-imagenet-from-model-zoo/index.md similarity index 100% rename from docs/AI-ML-models/get-ml-model-using-imagenet-from-model-zoo.md rename to 
docs/AI-ML-models/get-ml-model-using-imagenet-from-model-zoo/index.md diff --git a/docs/AI-ML-models/get-tvm-model.md b/docs/AI-ML-models/get-tvm-model/index.md similarity index 100% rename from docs/AI-ML-models/get-tvm-model.md rename to docs/AI-ML-models/get-tvm-model/index.md diff --git a/docs/AI-ML-optimization/calibrate-model-for.qaic.md b/docs/AI-ML-optimization/calibrate-model-for.qaic/index.md similarity index 100% rename from docs/AI-ML-optimization/calibrate-model-for.qaic.md rename to docs/AI-ML-optimization/calibrate-model-for.qaic/index.md diff --git a/docs/AI-ML-optimization/compile-model-for.qaic.md b/docs/AI-ML-optimization/compile-model-for.qaic/index.md similarity index 100% rename from docs/AI-ML-optimization/compile-model-for.qaic.md rename to docs/AI-ML-optimization/compile-model-for.qaic/index.md diff --git a/docs/AI-ML-optimization/prune-bert-models.md b/docs/AI-ML-optimization/prune-bert-models/index.md similarity index 100% rename from docs/AI-ML-optimization/prune-bert-models.md rename to docs/AI-ML-optimization/prune-bert-models/index.md diff --git a/docs/CM-interface-prototyping/test-mlperf-inference-retinanet.md b/docs/CM-interface-prototyping/test-mlperf-inference-retinanet/index.md similarity index 100% rename from docs/CM-interface-prototyping/test-mlperf-inference-retinanet.md rename to docs/CM-interface-prototyping/test-mlperf-inference-retinanet/index.md diff --git a/docs/CUDA-automation/get-cuda-devices.md b/docs/CUDA-automation/get-cuda-devices/index.md similarity index 100% rename from docs/CUDA-automation/get-cuda-devices.md rename to docs/CUDA-automation/get-cuda-devices/index.md diff --git a/docs/CUDA-automation/get-cuda.md b/docs/CUDA-automation/get-cuda/index.md similarity index 100% rename from docs/CUDA-automation/get-cuda.md rename to docs/CUDA-automation/get-cuda/index.md diff --git a/docs/CUDA-automation/get-cudnn.md b/docs/CUDA-automation/get-cudnn/index.md similarity index 100% rename from 
docs/CUDA-automation/get-cudnn.md rename to docs/CUDA-automation/get-cudnn/index.md diff --git a/docs/CUDA-automation/get-tensorrt.md b/docs/CUDA-automation/get-tensorrt/index.md similarity index 100% rename from docs/CUDA-automation/get-tensorrt.md rename to docs/CUDA-automation/get-tensorrt/index.md diff --git a/docs/CUDA-automation/install-cuda-package-manager.md b/docs/CUDA-automation/install-cuda-package-manager/index.md similarity index 100% rename from docs/CUDA-automation/install-cuda-package-manager.md rename to docs/CUDA-automation/install-cuda-package-manager/index.md diff --git a/docs/CUDA-automation/install-cuda-prebuilt.md b/docs/CUDA-automation/install-cuda-prebuilt/index.md similarity index 100% rename from docs/CUDA-automation/install-cuda-prebuilt.md rename to docs/CUDA-automation/install-cuda-prebuilt/index.md diff --git a/docs/Cloud-automation/destroy-terraform.md b/docs/Cloud-automation/destroy-terraform/index.md similarity index 100% rename from docs/Cloud-automation/destroy-terraform.md rename to docs/Cloud-automation/destroy-terraform/index.md diff --git a/docs/Cloud-automation/get-aws-cli.md b/docs/Cloud-automation/get-aws-cli/index.md similarity index 100% rename from docs/Cloud-automation/get-aws-cli.md rename to docs/Cloud-automation/get-aws-cli/index.md diff --git a/docs/Cloud-automation/get-terraform.md b/docs/Cloud-automation/get-terraform/index.md similarity index 100% rename from docs/Cloud-automation/get-terraform.md rename to docs/Cloud-automation/get-terraform/index.md diff --git a/docs/Cloud-automation/install-aws-cli.md b/docs/Cloud-automation/install-aws-cli/index.md similarity index 100% rename from docs/Cloud-automation/install-aws-cli.md rename to docs/Cloud-automation/install-aws-cli/index.md diff --git a/docs/Cloud-automation/install-terraform-from-src.md b/docs/Cloud-automation/install-terraform-from-src/index.md similarity index 100% rename from docs/Cloud-automation/install-terraform-from-src.md rename to 
docs/Cloud-automation/install-terraform-from-src/index.md diff --git a/docs/Cloud-automation/run-terraform.md b/docs/Cloud-automation/run-terraform/index.md similarity index 100% rename from docs/Cloud-automation/run-terraform.md rename to docs/Cloud-automation/run-terraform/index.md diff --git a/docs/Collective-benchmarking/launch-benchmark.md b/docs/Collective-benchmarking/launch-benchmark/index.md similarity index 100% rename from docs/Collective-benchmarking/launch-benchmark.md rename to docs/Collective-benchmarking/launch-benchmark/index.md diff --git a/docs/Compiler-automation/get-aocl.md b/docs/Compiler-automation/get-aocl/index.md similarity index 100% rename from docs/Compiler-automation/get-aocl.md rename to docs/Compiler-automation/get-aocl/index.md diff --git a/docs/Compiler-automation/get-cl.md b/docs/Compiler-automation/get-cl/index.md similarity index 100% rename from docs/Compiler-automation/get-cl.md rename to docs/Compiler-automation/get-cl/index.md diff --git a/docs/Compiler-automation/get-compiler-flags.md b/docs/Compiler-automation/get-compiler-flags/index.md similarity index 100% rename from docs/Compiler-automation/get-compiler-flags.md rename to docs/Compiler-automation/get-compiler-flags/index.md diff --git a/docs/Compiler-automation/get-compiler-rust.md b/docs/Compiler-automation/get-compiler-rust/index.md similarity index 100% rename from docs/Compiler-automation/get-compiler-rust.md rename to docs/Compiler-automation/get-compiler-rust/index.md diff --git a/docs/Compiler-automation/get-gcc.md b/docs/Compiler-automation/get-gcc/index.md similarity index 100% rename from docs/Compiler-automation/get-gcc.md rename to docs/Compiler-automation/get-gcc/index.md diff --git a/docs/Compiler-automation/get-go.md b/docs/Compiler-automation/get-go/index.md similarity index 100% rename from docs/Compiler-automation/get-go.md rename to docs/Compiler-automation/get-go/index.md diff --git a/docs/Compiler-automation/get-llvm.md 
b/docs/Compiler-automation/get-llvm/index.md similarity index 100% rename from docs/Compiler-automation/get-llvm.md rename to docs/Compiler-automation/get-llvm/index.md diff --git a/docs/Compiler-automation/install-gcc-src.md b/docs/Compiler-automation/install-gcc-src/index.md similarity index 100% rename from docs/Compiler-automation/install-gcc-src.md rename to docs/Compiler-automation/install-gcc-src/index.md diff --git a/docs/Compiler-automation/install-ipex-from-src.md b/docs/Compiler-automation/install-ipex-from-src/index.md similarity index 100% rename from docs/Compiler-automation/install-ipex-from-src.md rename to docs/Compiler-automation/install-ipex-from-src/index.md diff --git a/docs/Compiler-automation/install-llvm-prebuilt.md b/docs/Compiler-automation/install-llvm-prebuilt/index.md similarity index 100% rename from docs/Compiler-automation/install-llvm-prebuilt.md rename to docs/Compiler-automation/install-llvm-prebuilt/index.md diff --git a/docs/Compiler-automation/install-llvm-src.md b/docs/Compiler-automation/install-llvm-src/index.md similarity index 100% rename from docs/Compiler-automation/install-llvm-src.md rename to docs/Compiler-automation/install-llvm-src/index.md diff --git a/docs/Compiler-automation/install-onednn-from-src.md b/docs/Compiler-automation/install-onednn-from-src/index.md similarity index 100% rename from docs/Compiler-automation/install-onednn-from-src.md rename to docs/Compiler-automation/install-onednn-from-src/index.md diff --git a/docs/Compiler-automation/install-onnxruntime-from-src.md b/docs/Compiler-automation/install-onnxruntime-from-src/index.md similarity index 100% rename from docs/Compiler-automation/install-onnxruntime-from-src.md rename to docs/Compiler-automation/install-onnxruntime-from-src/index.md diff --git a/docs/Compiler-automation/install-pytorch-from-src.md b/docs/Compiler-automation/install-pytorch-from-src/index.md similarity index 100% rename from 
docs/Compiler-automation/install-pytorch-from-src.md rename to docs/Compiler-automation/install-pytorch-from-src/index.md diff --git a/docs/Compiler-automation/install-pytorch-kineto-from-src.md b/docs/Compiler-automation/install-pytorch-kineto-from-src/index.md similarity index 100% rename from docs/Compiler-automation/install-pytorch-kineto-from-src.md rename to docs/Compiler-automation/install-pytorch-kineto-from-src/index.md diff --git a/docs/Compiler-automation/install-torchvision-from-src.md b/docs/Compiler-automation/install-torchvision-from-src/index.md similarity index 100% rename from docs/Compiler-automation/install-torchvision-from-src.md rename to docs/Compiler-automation/install-torchvision-from-src/index.md diff --git a/docs/Compiler-automation/install-tpp-pytorch-extension.md b/docs/Compiler-automation/install-tpp-pytorch-extension/index.md similarity index 100% rename from docs/Compiler-automation/install-tpp-pytorch-extension.md rename to docs/Compiler-automation/install-tpp-pytorch-extension/index.md diff --git a/docs/Compiler-automation/install-transformers-from-src.md b/docs/Compiler-automation/install-transformers-from-src/index.md similarity index 100% rename from docs/Compiler-automation/install-transformers-from-src.md rename to docs/Compiler-automation/install-transformers-from-src/index.md diff --git a/docs/Dashboard-automation/publish-results-to-dashboard.md b/docs/Dashboard-automation/publish-results-to-dashboard/index.md similarity index 100% rename from docs/Dashboard-automation/publish-results-to-dashboard.md rename to docs/Dashboard-automation/publish-results-to-dashboard/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-android-sdk.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-android-sdk/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/get-android-sdk.md rename to 
docs/Detection-or-installation-of-tools-and-artifacts/get-android-sdk/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-aria2.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-aria2/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/get-aria2.md rename to docs/Detection-or-installation-of-tools-and-artifacts/get-aria2/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-bazel.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-bazel/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/get-bazel.md rename to docs/Detection-or-installation-of-tools-and-artifacts/get-bazel/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-blis.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-blis/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/get-blis.md rename to docs/Detection-or-installation-of-tools-and-artifacts/get-blis/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-brew.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-brew/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/get-brew.md rename to docs/Detection-or-installation-of-tools-and-artifacts/get-brew/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-cmake.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-cmake/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/get-cmake.md rename to docs/Detection-or-installation-of-tools-and-artifacts/get-cmake/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-cmsis_5.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-cmsis_5/index.md similarity index 100% rename from 
docs/Detection-or-installation-of-tools-and-artifacts/get-cmsis_5.md rename to docs/Detection-or-installation-of-tools-and-artifacts/get-cmsis_5/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-docker.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-docker/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/get-docker.md rename to docs/Detection-or-installation-of-tools-and-artifacts/get-docker/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-generic-sys-util.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-generic-sys-util/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/get-generic-sys-util.md rename to docs/Detection-or-installation-of-tools-and-artifacts/get-generic-sys-util/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-google-test.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-google-test/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/get-google-test.md rename to docs/Detection-or-installation-of-tools-and-artifacts/get-google-test/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-java.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-java/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/get-java.md rename to docs/Detection-or-installation-of-tools-and-artifacts/get-java/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-javac.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-javac/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/get-javac.md rename to docs/Detection-or-installation-of-tools-and-artifacts/get-javac/index.md diff --git 
a/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-armnn.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-armnn/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/get-lib-armnn.md rename to docs/Detection-or-installation-of-tools-and-artifacts/get-lib-armnn/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-dnnl.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-dnnl/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/get-lib-dnnl.md rename to docs/Detection-or-installation-of-tools-and-artifacts/get-lib-dnnl/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-protobuf.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-protobuf/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/get-lib-protobuf.md rename to docs/Detection-or-installation-of-tools-and-artifacts/get-lib-protobuf/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-qaic-api.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-qaic-api/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/get-lib-qaic-api.md rename to docs/Detection-or-installation-of-tools-and-artifacts/get-lib-qaic-api/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-nvidia-docker.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-nvidia-docker/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/get-nvidia-docker.md rename to docs/Detection-or-installation-of-tools-and-artifacts/get-nvidia-docker/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-openssl.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-openssl/index.md similarity index 100% rename from 
docs/Detection-or-installation-of-tools-and-artifacts/get-openssl.md rename to docs/Detection-or-installation-of-tools-and-artifacts/get-openssl/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-rclone.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-rclone/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/get-rclone.md rename to docs/Detection-or-installation-of-tools-and-artifacts/get-rclone/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-cm.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-cm/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-cm.md rename to docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-cm/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-min.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-min/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-min.md rename to docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-min/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-xilinx-sdk.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-xilinx-sdk/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/get-xilinx-sdk.md rename to docs/Detection-or-installation-of-tools-and-artifacts/get-xilinx-sdk/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-zendnn.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-zendnn/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/get-zendnn.md rename to docs/Detection-or-installation-of-tools-and-artifacts/get-zendnn/index.md diff --git 
a/docs/Detection-or-installation-of-tools-and-artifacts/install-bazel.md b/docs/Detection-or-installation-of-tools-and-artifacts/install-bazel/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/install-bazel.md rename to docs/Detection-or-installation-of-tools-and-artifacts/install-bazel/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/install-cmake-prebuilt.md b/docs/Detection-or-installation-of-tools-and-artifacts/install-cmake-prebuilt/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/install-cmake-prebuilt.md rename to docs/Detection-or-installation-of-tools-and-artifacts/install-cmake-prebuilt/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/install-gflags.md b/docs/Detection-or-installation-of-tools-and-artifacts/install-gflags/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/install-gflags.md rename to docs/Detection-or-installation-of-tools-and-artifacts/install-gflags/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/install-github-cli.md b/docs/Detection-or-installation-of-tools-and-artifacts/install-github-cli/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/install-github-cli.md rename to docs/Detection-or-installation-of-tools-and-artifacts/install-github-cli/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/install-numactl-from-src.md b/docs/Detection-or-installation-of-tools-and-artifacts/install-numactl-from-src/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/install-numactl-from-src.md rename to docs/Detection-or-installation-of-tools-and-artifacts/install-numactl-from-src/index.md diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/install-openssl.md 
b/docs/Detection-or-installation-of-tools-and-artifacts/install-openssl/index.md similarity index 100% rename from docs/Detection-or-installation-of-tools-and-artifacts/install-openssl.md rename to docs/Detection-or-installation-of-tools-and-artifacts/install-openssl/index.md diff --git a/docs/DevOps-automation/benchmark-program.md b/docs/DevOps-automation/benchmark-program/index.md similarity index 100% rename from docs/DevOps-automation/benchmark-program.md rename to docs/DevOps-automation/benchmark-program/index.md diff --git a/docs/DevOps-automation/compile-program.md b/docs/DevOps-automation/compile-program/index.md similarity index 100% rename from docs/DevOps-automation/compile-program.md rename to docs/DevOps-automation/compile-program/index.md diff --git a/docs/DevOps-automation/convert-csv-to-md.md b/docs/DevOps-automation/convert-csv-to-md/index.md similarity index 100% rename from docs/DevOps-automation/convert-csv-to-md.md rename to docs/DevOps-automation/convert-csv-to-md/index.md diff --git a/docs/DevOps-automation/copy-to-clipboard.md b/docs/DevOps-automation/copy-to-clipboard/index.md similarity index 100% rename from docs/DevOps-automation/copy-to-clipboard.md rename to docs/DevOps-automation/copy-to-clipboard/index.md diff --git a/docs/DevOps-automation/create-conda-env.md b/docs/DevOps-automation/create-conda-env/index.md similarity index 100% rename from docs/DevOps-automation/create-conda-env.md rename to docs/DevOps-automation/create-conda-env/index.md diff --git a/docs/DevOps-automation/create-patch.md b/docs/DevOps-automation/create-patch/index.md similarity index 100% rename from docs/DevOps-automation/create-patch.md rename to docs/DevOps-automation/create-patch/index.md diff --git a/docs/DevOps-automation/detect-sudo.md b/docs/DevOps-automation/detect-sudo/index.md similarity index 100% rename from docs/DevOps-automation/detect-sudo.md rename to docs/DevOps-automation/detect-sudo/index.md diff --git 
a/docs/DevOps-automation/download-and-extract.md b/docs/DevOps-automation/download-and-extract/index.md similarity index 100% rename from docs/DevOps-automation/download-and-extract.md rename to docs/DevOps-automation/download-and-extract/index.md diff --git a/docs/DevOps-automation/download-file.md b/docs/DevOps-automation/download-file/index.md similarity index 100% rename from docs/DevOps-automation/download-file.md rename to docs/DevOps-automation/download-file/index.md diff --git a/docs/DevOps-automation/download-torrent.md b/docs/DevOps-automation/download-torrent/index.md similarity index 100% rename from docs/DevOps-automation/download-torrent.md rename to docs/DevOps-automation/download-torrent/index.md diff --git a/docs/DevOps-automation/extract-file.md b/docs/DevOps-automation/extract-file/index.md similarity index 100% rename from docs/DevOps-automation/extract-file.md rename to docs/DevOps-automation/extract-file/index.md diff --git a/docs/DevOps-automation/fail.md b/docs/DevOps-automation/fail/index.md similarity index 100% rename from docs/DevOps-automation/fail.md rename to docs/DevOps-automation/fail/index.md diff --git a/docs/DevOps-automation/get-conda.md b/docs/DevOps-automation/get-conda/index.md similarity index 100% rename from docs/DevOps-automation/get-conda.md rename to docs/DevOps-automation/get-conda/index.md diff --git a/docs/DevOps-automation/get-git-repo.md b/docs/DevOps-automation/get-git-repo/index.md similarity index 100% rename from docs/DevOps-automation/get-git-repo.md rename to docs/DevOps-automation/get-git-repo/index.md diff --git a/docs/DevOps-automation/get-github-cli.md b/docs/DevOps-automation/get-github-cli/index.md similarity index 100% rename from docs/DevOps-automation/get-github-cli.md rename to docs/DevOps-automation/get-github-cli/index.md diff --git a/docs/DevOps-automation/pull-git-repo.md b/docs/DevOps-automation/pull-git-repo/index.md similarity index 100% rename from docs/DevOps-automation/pull-git-repo.md 
rename to docs/DevOps-automation/pull-git-repo/index.md diff --git a/docs/DevOps-automation/push-csv-to-spreadsheet.md b/docs/DevOps-automation/push-csv-to-spreadsheet/index.md similarity index 100% rename from docs/DevOps-automation/push-csv-to-spreadsheet.md rename to docs/DevOps-automation/push-csv-to-spreadsheet/index.md diff --git a/docs/DevOps-automation/set-device-settings-qaic.md b/docs/DevOps-automation/set-device-settings-qaic/index.md similarity index 100% rename from docs/DevOps-automation/set-device-settings-qaic.md rename to docs/DevOps-automation/set-device-settings-qaic/index.md diff --git a/docs/DevOps-automation/set-echo-off-win.md b/docs/DevOps-automation/set-echo-off-win/index.md similarity index 100% rename from docs/DevOps-automation/set-echo-off-win.md rename to docs/DevOps-automation/set-echo-off-win/index.md diff --git a/docs/DevOps-automation/set-performance-mode.md b/docs/DevOps-automation/set-performance-mode/index.md similarity index 100% rename from docs/DevOps-automation/set-performance-mode.md rename to docs/DevOps-automation/set-performance-mode/index.md diff --git a/docs/DevOps-automation/set-sqlite-dir.md b/docs/DevOps-automation/set-sqlite-dir/index.md similarity index 100% rename from docs/DevOps-automation/set-sqlite-dir.md rename to docs/DevOps-automation/set-sqlite-dir/index.md diff --git a/docs/DevOps-automation/tar-my-folder.md b/docs/DevOps-automation/tar-my-folder/index.md similarity index 100% rename from docs/DevOps-automation/tar-my-folder.md rename to docs/DevOps-automation/tar-my-folder/index.md diff --git a/docs/Docker-automation/build-docker-image.md b/docs/Docker-automation/build-docker-image/index.md similarity index 100% rename from docs/Docker-automation/build-docker-image.md rename to docs/Docker-automation/build-docker-image/index.md diff --git a/docs/Docker-automation/build-dockerfile.md b/docs/Docker-automation/build-dockerfile/index.md similarity index 100% rename from 
docs/Docker-automation/build-dockerfile.md rename to docs/Docker-automation/build-dockerfile/index.md diff --git a/docs/Docker-automation/prune-docker.md b/docs/Docker-automation/prune-docker/index.md similarity index 100% rename from docs/Docker-automation/prune-docker.md rename to docs/Docker-automation/prune-docker/index.md diff --git a/docs/Docker-automation/run-docker-container.md b/docs/Docker-automation/run-docker-container/index.md similarity index 100% rename from docs/Docker-automation/run-docker-container.md rename to docs/Docker-automation/run-docker-container/index.md diff --git a/docs/GUI/gui.md b/docs/GUI/gui/index.md similarity index 100% rename from docs/GUI/gui.md rename to docs/GUI/gui/index.md diff --git a/docs/Legacy-CK-support/get-ck-repo-mlops.md b/docs/Legacy-CK-support/get-ck-repo-mlops/index.md similarity index 100% rename from docs/Legacy-CK-support/get-ck-repo-mlops.md rename to docs/Legacy-CK-support/get-ck-repo-mlops/index.md diff --git a/docs/Legacy-CK-support/get-ck.md b/docs/Legacy-CK-support/get-ck/index.md similarity index 100% rename from docs/Legacy-CK-support/get-ck.md rename to docs/Legacy-CK-support/get-ck/index.md diff --git a/docs/MLPerf-benchmark-support/add-custom-nvidia-system.md b/docs/MLPerf-benchmark-support/add-custom-nvidia-system/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/add-custom-nvidia-system.md rename to docs/MLPerf-benchmark-support/add-custom-nvidia-system/index.md diff --git a/docs/MLPerf-benchmark-support/benchmark-any-mlperf-inference-implementation.md b/docs/MLPerf-benchmark-support/benchmark-any-mlperf-inference-implementation/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/benchmark-any-mlperf-inference-implementation.md rename to docs/MLPerf-benchmark-support/benchmark-any-mlperf-inference-implementation/index.md diff --git a/docs/MLPerf-benchmark-support/build-mlperf-inference-server-nvidia.md 
b/docs/MLPerf-benchmark-support/build-mlperf-inference-server-nvidia/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/build-mlperf-inference-server-nvidia.md rename to docs/MLPerf-benchmark-support/build-mlperf-inference-server-nvidia/index.md diff --git a/docs/MLPerf-benchmark-support/generate-mlperf-inference-submission.md b/docs/MLPerf-benchmark-support/generate-mlperf-inference-submission/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/generate-mlperf-inference-submission.md rename to docs/MLPerf-benchmark-support/generate-mlperf-inference-submission/index.md diff --git a/docs/MLPerf-benchmark-support/generate-mlperf-inference-user-conf.md b/docs/MLPerf-benchmark-support/generate-mlperf-inference-user-conf/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/generate-mlperf-inference-user-conf.md rename to docs/MLPerf-benchmark-support/generate-mlperf-inference-user-conf/index.md diff --git a/docs/MLPerf-benchmark-support/generate-mlperf-tiny-report.md b/docs/MLPerf-benchmark-support/generate-mlperf-tiny-report/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/generate-mlperf-tiny-report.md rename to docs/MLPerf-benchmark-support/generate-mlperf-tiny-report/index.md diff --git a/docs/MLPerf-benchmark-support/generate-mlperf-tiny-submission.md b/docs/MLPerf-benchmark-support/generate-mlperf-tiny-submission/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/generate-mlperf-tiny-submission.md rename to docs/MLPerf-benchmark-support/generate-mlperf-tiny-submission/index.md diff --git a/docs/MLPerf-benchmark-support/generate-nvidia-engine.md b/docs/MLPerf-benchmark-support/generate-nvidia-engine/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/generate-nvidia-engine.md rename to docs/MLPerf-benchmark-support/generate-nvidia-engine/index.md diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-intel-scratch-space.md 
b/docs/MLPerf-benchmark-support/get-mlperf-inference-intel-scratch-space/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/get-mlperf-inference-intel-scratch-space.md rename to docs/MLPerf-benchmark-support/get-mlperf-inference-intel-scratch-space/index.md diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-loadgen.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-loadgen/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/get-mlperf-inference-loadgen.md rename to docs/MLPerf-benchmark-support/get-mlperf-inference-loadgen/index.md diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-common-code.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-common-code/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-common-code.md rename to docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-common-code/index.md diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-scratch-space.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-scratch-space/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-scratch-space.md rename to docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-scratch-space/index.md diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-results-dir.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-results-dir/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/get-mlperf-inference-results-dir.md rename to docs/MLPerf-benchmark-support/get-mlperf-inference-results-dir/index.md diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-results.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-results/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/get-mlperf-inference-results.md rename to docs/MLPerf-benchmark-support/get-mlperf-inference-results/index.md diff --git 
a/docs/MLPerf-benchmark-support/get-mlperf-inference-src.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-src/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/get-mlperf-inference-src.md rename to docs/MLPerf-benchmark-support/get-mlperf-inference-src/index.md diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-submission-dir.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-submission-dir/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/get-mlperf-inference-submission-dir.md rename to docs/MLPerf-benchmark-support/get-mlperf-inference-submission-dir/index.md diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-sut-configs.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-sut-configs/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/get-mlperf-inference-sut-configs.md rename to docs/MLPerf-benchmark-support/get-mlperf-inference-sut-configs/index.md diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-sut-description.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-sut-description/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/get-mlperf-inference-sut-description.md rename to docs/MLPerf-benchmark-support/get-mlperf-inference-sut-description/index.md diff --git a/docs/MLPerf-benchmark-support/get-mlperf-logging.md b/docs/MLPerf-benchmark-support/get-mlperf-logging/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/get-mlperf-logging.md rename to docs/MLPerf-benchmark-support/get-mlperf-logging/index.md diff --git a/docs/MLPerf-benchmark-support/get-mlperf-power-dev.md b/docs/MLPerf-benchmark-support/get-mlperf-power-dev/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/get-mlperf-power-dev.md rename to docs/MLPerf-benchmark-support/get-mlperf-power-dev/index.md diff --git a/docs/MLPerf-benchmark-support/get-mlperf-tiny-eembc-energy-runner-src.md 
b/docs/MLPerf-benchmark-support/get-mlperf-tiny-eembc-energy-runner-src/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/get-mlperf-tiny-eembc-energy-runner-src.md rename to docs/MLPerf-benchmark-support/get-mlperf-tiny-eembc-energy-runner-src/index.md diff --git a/docs/MLPerf-benchmark-support/get-mlperf-tiny-src.md b/docs/MLPerf-benchmark-support/get-mlperf-tiny-src/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/get-mlperf-tiny-src.md rename to docs/MLPerf-benchmark-support/get-mlperf-tiny-src/index.md diff --git a/docs/MLPerf-benchmark-support/get-mlperf-training-nvidia-code.md b/docs/MLPerf-benchmark-support/get-mlperf-training-nvidia-code/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/get-mlperf-training-nvidia-code.md rename to docs/MLPerf-benchmark-support/get-mlperf-training-nvidia-code/index.md diff --git a/docs/MLPerf-benchmark-support/get-mlperf-training-src.md b/docs/MLPerf-benchmark-support/get-mlperf-training-src/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/get-mlperf-training-src.md rename to docs/MLPerf-benchmark-support/get-mlperf-training-src/index.md diff --git a/docs/MLPerf-benchmark-support/get-nvidia-mitten.md b/docs/MLPerf-benchmark-support/get-nvidia-mitten/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/get-nvidia-mitten.md rename to docs/MLPerf-benchmark-support/get-nvidia-mitten/index.md diff --git a/docs/MLPerf-benchmark-support/get-spec-ptd.md b/docs/MLPerf-benchmark-support/get-spec-ptd/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/get-spec-ptd.md rename to docs/MLPerf-benchmark-support/get-spec-ptd/index.md diff --git a/docs/MLPerf-benchmark-support/import-mlperf-inference-to-experiment.md b/docs/MLPerf-benchmark-support/import-mlperf-inference-to-experiment/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/import-mlperf-inference-to-experiment.md rename to 
docs/MLPerf-benchmark-support/import-mlperf-inference-to-experiment/index.md diff --git a/docs/MLPerf-benchmark-support/import-mlperf-tiny-to-experiment.md b/docs/MLPerf-benchmark-support/import-mlperf-tiny-to-experiment/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/import-mlperf-tiny-to-experiment.md rename to docs/MLPerf-benchmark-support/import-mlperf-tiny-to-experiment/index.md diff --git a/docs/MLPerf-benchmark-support/import-mlperf-training-to-experiment.md b/docs/MLPerf-benchmark-support/import-mlperf-training-to-experiment/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/import-mlperf-training-to-experiment.md rename to docs/MLPerf-benchmark-support/import-mlperf-training-to-experiment/index.md diff --git a/docs/MLPerf-benchmark-support/install-mlperf-logging-from-src.md b/docs/MLPerf-benchmark-support/install-mlperf-logging-from-src/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/install-mlperf-logging-from-src.md rename to docs/MLPerf-benchmark-support/install-mlperf-logging-from-src/index.md diff --git a/docs/MLPerf-benchmark-support/prepare-training-data-bert.md b/docs/MLPerf-benchmark-support/prepare-training-data-bert/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/prepare-training-data-bert.md rename to docs/MLPerf-benchmark-support/prepare-training-data-bert/index.md diff --git a/docs/MLPerf-benchmark-support/prepare-training-data-resnet.md b/docs/MLPerf-benchmark-support/prepare-training-data-resnet/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/prepare-training-data-resnet.md rename to docs/MLPerf-benchmark-support/prepare-training-data-resnet/index.md diff --git a/docs/MLPerf-benchmark-support/preprocess-mlperf-inference-submission.md b/docs/MLPerf-benchmark-support/preprocess-mlperf-inference-submission/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/preprocess-mlperf-inference-submission.md 
rename to docs/MLPerf-benchmark-support/preprocess-mlperf-inference-submission/index.md diff --git a/docs/MLPerf-benchmark-support/process-mlperf-accuracy.md b/docs/MLPerf-benchmark-support/process-mlperf-accuracy/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/process-mlperf-accuracy.md rename to docs/MLPerf-benchmark-support/process-mlperf-accuracy/index.md diff --git a/docs/MLPerf-benchmark-support/push-mlperf-inference-results-to-github.md b/docs/MLPerf-benchmark-support/push-mlperf-inference-results-to-github/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/push-mlperf-inference-results-to-github.md rename to docs/MLPerf-benchmark-support/push-mlperf-inference-results-to-github/index.md diff --git a/docs/MLPerf-benchmark-support/run-all-mlperf-models.md b/docs/MLPerf-benchmark-support/run-all-mlperf-models/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/run-all-mlperf-models.md rename to docs/MLPerf-benchmark-support/run-all-mlperf-models/index.md diff --git a/docs/MLPerf-benchmark-support/run-mlperf-inference-mobilenet-models.md b/docs/MLPerf-benchmark-support/run-mlperf-inference-mobilenet-models/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/run-mlperf-inference-mobilenet-models.md rename to docs/MLPerf-benchmark-support/run-mlperf-inference-mobilenet-models/index.md diff --git a/docs/MLPerf-benchmark-support/run-mlperf-inference-submission-checker.md b/docs/MLPerf-benchmark-support/run-mlperf-inference-submission-checker/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/run-mlperf-inference-submission-checker.md rename to docs/MLPerf-benchmark-support/run-mlperf-inference-submission-checker/index.md diff --git a/docs/MLPerf-benchmark-support/run-mlperf-power-client.md b/docs/MLPerf-benchmark-support/run-mlperf-power-client/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/run-mlperf-power-client.md rename to 
docs/MLPerf-benchmark-support/run-mlperf-power-client/index.md diff --git a/docs/MLPerf-benchmark-support/run-mlperf-power-server.md b/docs/MLPerf-benchmark-support/run-mlperf-power-server/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/run-mlperf-power-server.md rename to docs/MLPerf-benchmark-support/run-mlperf-power-server/index.md diff --git a/docs/MLPerf-benchmark-support/run-mlperf-training-submission-checker.md b/docs/MLPerf-benchmark-support/run-mlperf-training-submission-checker/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/run-mlperf-training-submission-checker.md rename to docs/MLPerf-benchmark-support/run-mlperf-training-submission-checker/index.md diff --git a/docs/MLPerf-benchmark-support/truncate-mlperf-inference-accuracy-log.md b/docs/MLPerf-benchmark-support/truncate-mlperf-inference-accuracy-log/index.md similarity index 100% rename from docs/MLPerf-benchmark-support/truncate-mlperf-inference-accuracy-log.md rename to docs/MLPerf-benchmark-support/truncate-mlperf-inference-accuracy-log/index.md diff --git a/docs/Modular-AI-ML-application-pipeline/app-image-classification-onnx-py.md b/docs/Modular-AI-ML-application-pipeline/app-image-classification-onnx-py/index.md similarity index 100% rename from docs/Modular-AI-ML-application-pipeline/app-image-classification-onnx-py.md rename to docs/Modular-AI-ML-application-pipeline/app-image-classification-onnx-py/index.md diff --git a/docs/Modular-AI-ML-application-pipeline/app-image-classification-tf-onnx-cpp.md b/docs/Modular-AI-ML-application-pipeline/app-image-classification-tf-onnx-cpp/index.md similarity index 100% rename from docs/Modular-AI-ML-application-pipeline/app-image-classification-tf-onnx-cpp.md rename to docs/Modular-AI-ML-application-pipeline/app-image-classification-tf-onnx-cpp/index.md diff --git a/docs/Modular-AI-ML-application-pipeline/app-image-classification-torch-py.md 
b/docs/Modular-AI-ML-application-pipeline/app-image-classification-torch-py/index.md similarity index 100% rename from docs/Modular-AI-ML-application-pipeline/app-image-classification-torch-py.md rename to docs/Modular-AI-ML-application-pipeline/app-image-classification-torch-py/index.md diff --git a/docs/Modular-AI-ML-application-pipeline/app-image-classification-tvm-onnx-py.md b/docs/Modular-AI-ML-application-pipeline/app-image-classification-tvm-onnx-py/index.md similarity index 100% rename from docs/Modular-AI-ML-application-pipeline/app-image-classification-tvm-onnx-py.md rename to docs/Modular-AI-ML-application-pipeline/app-image-classification-tvm-onnx-py/index.md diff --git a/docs/Modular-AI-ML-application-pipeline/app-stable-diffusion-onnx-py.md b/docs/Modular-AI-ML-application-pipeline/app-stable-diffusion-onnx-py/index.md similarity index 100% rename from docs/Modular-AI-ML-application-pipeline/app-stable-diffusion-onnx-py.md rename to docs/Modular-AI-ML-application-pipeline/app-stable-diffusion-onnx-py/index.md diff --git a/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-dummy.md b/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-dummy/index.md similarity index 100% rename from docs/Modular-MLPerf-benchmarks/app-mlperf-inference-dummy.md rename to docs/Modular-MLPerf-benchmarks/app-mlperf-inference-dummy/index.md diff --git a/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-intel.md b/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-intel/index.md similarity index 100% rename from docs/Modular-MLPerf-benchmarks/app-mlperf-inference-intel.md rename to docs/Modular-MLPerf-benchmarks/app-mlperf-inference-intel/index.md diff --git a/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-qualcomm.md b/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-qualcomm/index.md similarity index 100% rename from docs/Modular-MLPerf-benchmarks/app-mlperf-inference-qualcomm.md rename to docs/Modular-MLPerf-benchmarks/app-mlperf-inference-qualcomm/index.md 
diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/app-loadgen-generic-python.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-loadgen-generic-python/index.md similarity index 100% rename from docs/Modular-MLPerf-inference-benchmark-pipeline/app-loadgen-generic-python.md rename to docs/Modular-MLPerf-inference-benchmark-pipeline/app-loadgen-generic-python/index.md diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-ctuning-cpp-tflite.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-ctuning-cpp-tflite/index.md similarity index 100% rename from docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-ctuning-cpp-tflite.md rename to docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-ctuning-cpp-tflite/index.md diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-cpp.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-cpp/index.md similarity index 100% rename from docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-cpp.md rename to docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-cpp/index.md diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-python.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-python/index.md similarity index 100% rename from docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-python.md rename to docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-python/index.md diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference/index.md similarity index 100% rename from docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference.md rename to 
docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference/index.md diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/benchmark-program-mlperf.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/benchmark-program-mlperf/index.md similarity index 100% rename from docs/Modular-MLPerf-inference-benchmark-pipeline/benchmark-program-mlperf.md rename to docs/Modular-MLPerf-inference-benchmark-pipeline/benchmark-program-mlperf/index.md diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/run-mlperf-inference-app.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/run-mlperf-inference-app/index.md similarity index 100% rename from docs/Modular-MLPerf-inference-benchmark-pipeline/run-mlperf-inference-app.md rename to docs/Modular-MLPerf-inference-benchmark-pipeline/run-mlperf-inference-app/index.md diff --git a/docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-nvidia.md b/docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-nvidia/index.md similarity index 100% rename from docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-nvidia.md rename to docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-nvidia/index.md diff --git a/docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-reference.md b/docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-reference/index.md similarity index 100% rename from docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-reference.md rename to docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-reference/index.md diff --git a/docs/Modular-application-pipeline/app-image-corner-detection.md b/docs/Modular-application-pipeline/app-image-corner-detection/index.md similarity index 100% rename from docs/Modular-application-pipeline/app-image-corner-detection.md rename to docs/Modular-application-pipeline/app-image-corner-detection/index.md diff --git a/docs/Platform-information/detect-cpu.md 
b/docs/Platform-information/detect-cpu/index.md similarity index 100% rename from docs/Platform-information/detect-cpu.md rename to docs/Platform-information/detect-cpu/index.md diff --git a/docs/Platform-information/detect-os.md b/docs/Platform-information/detect-os/index.md similarity index 100% rename from docs/Platform-information/detect-os.md rename to docs/Platform-information/detect-os/index.md diff --git a/docs/Python-automation/activate-python-venv.md b/docs/Python-automation/activate-python-venv/index.md similarity index 100% rename from docs/Python-automation/activate-python-venv.md rename to docs/Python-automation/activate-python-venv/index.md diff --git a/docs/Python-automation/get-generic-python-lib.md b/docs/Python-automation/get-generic-python-lib/index.md similarity index 100% rename from docs/Python-automation/get-generic-python-lib.md rename to docs/Python-automation/get-generic-python-lib/index.md diff --git a/docs/Python-automation/get-python3.md b/docs/Python-automation/get-python3/index.md similarity index 100% rename from docs/Python-automation/get-python3.md rename to docs/Python-automation/get-python3/index.md diff --git a/docs/Python-automation/install-generic-conda-package.md b/docs/Python-automation/install-generic-conda-package/index.md similarity index 100% rename from docs/Python-automation/install-generic-conda-package.md rename to docs/Python-automation/install-generic-conda-package/index.md diff --git a/docs/Python-automation/install-python-src.md b/docs/Python-automation/install-python-src/index.md similarity index 100% rename from docs/Python-automation/install-python-src.md rename to docs/Python-automation/install-python-src/index.md diff --git a/docs/Python-automation/install-python-venv.md b/docs/Python-automation/install-python-venv/index.md similarity index 100% rename from docs/Python-automation/install-python-venv.md rename to docs/Python-automation/install-python-venv/index.md diff --git 
a/docs/Remote-automation/remote-run-commands.md b/docs/Remote-automation/remote-run-commands/index.md similarity index 100% rename from docs/Remote-automation/remote-run-commands.md rename to docs/Remote-automation/remote-run-commands/index.md diff --git a/docs/Reproduce-MLPerf-benchmarks/app-mlperf-inference-nvidia.md b/docs/Reproduce-MLPerf-benchmarks/app-mlperf-inference-nvidia/index.md similarity index 100% rename from docs/Reproduce-MLPerf-benchmarks/app-mlperf-inference-nvidia.md rename to docs/Reproduce-MLPerf-benchmarks/app-mlperf-inference-nvidia/index.md diff --git a/docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-octoml-tinyml-results.md b/docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-octoml-tinyml-results/index.md similarity index 100% rename from docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-octoml-tinyml-results.md rename to docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-octoml-tinyml-results/index.md diff --git a/docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-training-nvidia.md b/docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-training-nvidia/index.md similarity index 100% rename from docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-training-nvidia.md rename to docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-training-nvidia/index.md diff --git a/docs/Reproduce-MLPerf-benchmarks/wrapper-reproduce-octoml-tinyml-submission.md b/docs/Reproduce-MLPerf-benchmarks/wrapper-reproduce-octoml-tinyml-submission/index.md similarity index 100% rename from docs/Reproduce-MLPerf-benchmarks/wrapper-reproduce-octoml-tinyml-submission.md rename to docs/Reproduce-MLPerf-benchmarks/wrapper-reproduce-octoml-tinyml-submission/index.md diff --git a/docs/Reproducibility-and-artifact-evaluation/get-ipol-src.md b/docs/Reproducibility-and-artifact-evaluation/get-ipol-src/index.md similarity index 100% rename from docs/Reproducibility-and-artifact-evaluation/get-ipol-src.md rename to docs/Reproducibility-and-artifact-evaluation/get-ipol-src/index.md diff 
--git a/docs/Reproducibility-and-artifact-evaluation/process-ae-users.md b/docs/Reproducibility-and-artifact-evaluation/process-ae-users/index.md similarity index 100% rename from docs/Reproducibility-and-artifact-evaluation/process-ae-users.md rename to docs/Reproducibility-and-artifact-evaluation/process-ae-users/index.md diff --git a/docs/Reproducibility-and-artifact-evaluation/reproduce-ipol-paper-2022-439.md b/docs/Reproducibility-and-artifact-evaluation/reproduce-ipol-paper-2022-439/index.md similarity index 100% rename from docs/Reproducibility-and-artifact-evaluation/reproduce-ipol-paper-2022-439.md rename to docs/Reproducibility-and-artifact-evaluation/reproduce-ipol-paper-2022-439/index.md diff --git a/docs/Reproducibility-and-artifact-evaluation/reproduce-micro-paper-2023-victima.md b/docs/Reproducibility-and-artifact-evaluation/reproduce-micro-paper-2023-victima/index.md similarity index 100% rename from docs/Reproducibility-and-artifact-evaluation/reproduce-micro-paper-2023-victima.md rename to docs/Reproducibility-and-artifact-evaluation/reproduce-micro-paper-2023-victima/index.md diff --git a/docs/Tests/print-croissant-desc.md b/docs/Tests/print-croissant-desc/index.md similarity index 100% rename from docs/Tests/print-croissant-desc.md rename to docs/Tests/print-croissant-desc/index.md diff --git a/docs/Tests/print-hello-world-java.md b/docs/Tests/print-hello-world-java/index.md similarity index 100% rename from docs/Tests/print-hello-world-java.md rename to docs/Tests/print-hello-world-java/index.md diff --git a/docs/Tests/print-hello-world-javac.md b/docs/Tests/print-hello-world-javac/index.md similarity index 100% rename from docs/Tests/print-hello-world-javac.md rename to docs/Tests/print-hello-world-javac/index.md diff --git a/docs/Tests/print-hello-world-py.md b/docs/Tests/print-hello-world-py/index.md similarity index 100% rename from docs/Tests/print-hello-world-py.md rename to docs/Tests/print-hello-world-py/index.md diff --git 
a/docs/Tests/print-hello-world.md b/docs/Tests/print-hello-world/index.md similarity index 100% rename from docs/Tests/print-hello-world.md rename to docs/Tests/print-hello-world/index.md diff --git a/docs/Tests/print-python-version.md b/docs/Tests/print-python-version/index.md similarity index 100% rename from docs/Tests/print-python-version.md rename to docs/Tests/print-python-version/index.md diff --git a/docs/Tests/run-python.md b/docs/Tests/run-python/index.md similarity index 100% rename from docs/Tests/run-python.md rename to docs/Tests/run-python/index.md diff --git a/docs/Tests/test-deps-conditions.md b/docs/Tests/test-deps-conditions/index.md similarity index 100% rename from docs/Tests/test-deps-conditions.md rename to docs/Tests/test-deps-conditions/index.md diff --git a/docs/Tests/test-download-and-extract-artifacts.md b/docs/Tests/test-download-and-extract-artifacts/index.md similarity index 100% rename from docs/Tests/test-download-and-extract-artifacts.md rename to docs/Tests/test-download-and-extract-artifacts/index.md diff --git a/docs/Tests/test-set-sys-user-cm.md b/docs/Tests/test-set-sys-user-cm/index.md similarity index 100% rename from docs/Tests/test-set-sys-user-cm.md rename to docs/Tests/test-set-sys-user-cm/index.md diff --git a/docs/Tests/upgrade-python-pip.md b/docs/Tests/upgrade-python-pip/index.md similarity index 100% rename from docs/Tests/upgrade-python-pip.md rename to docs/Tests/upgrade-python-pip/index.md diff --git a/docs/TinyML-automation/create-fpgaconvnet-app-tinyml.md b/docs/TinyML-automation/create-fpgaconvnet-app-tinyml/index.md similarity index 100% rename from docs/TinyML-automation/create-fpgaconvnet-app-tinyml.md rename to docs/TinyML-automation/create-fpgaconvnet-app-tinyml/index.md diff --git a/docs/TinyML-automation/create-fpgaconvnet-config-tinyml.md b/docs/TinyML-automation/create-fpgaconvnet-config-tinyml/index.md similarity index 100% rename from docs/TinyML-automation/create-fpgaconvnet-config-tinyml.md rename 
to docs/TinyML-automation/create-fpgaconvnet-config-tinyml/index.md diff --git a/docs/TinyML-automation/flash-tinyml-binary.md b/docs/TinyML-automation/flash-tinyml-binary/index.md similarity index 100% rename from docs/TinyML-automation/flash-tinyml-binary.md rename to docs/TinyML-automation/flash-tinyml-binary/index.md diff --git a/docs/TinyML-automation/get-microtvm.md b/docs/TinyML-automation/get-microtvm/index.md similarity index 100% rename from docs/TinyML-automation/get-microtvm.md rename to docs/TinyML-automation/get-microtvm/index.md diff --git a/docs/TinyML-automation/get-zephyr-sdk.md b/docs/TinyML-automation/get-zephyr-sdk/index.md similarity index 100% rename from docs/TinyML-automation/get-zephyr-sdk.md rename to docs/TinyML-automation/get-zephyr-sdk/index.md diff --git a/docs/TinyML-automation/get-zephyr.md b/docs/TinyML-automation/get-zephyr/index.md similarity index 100% rename from docs/TinyML-automation/get-zephyr.md rename to docs/TinyML-automation/get-zephyr/index.md diff --git a/mkdocs.yml b/mkdocs.yml index 5dedc979b..da67a78d5 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -22,290 +22,290 @@ nav: - CM Scripts: - index.md - Python automation: - - activate-python-venv: Python-automation\activate-python-venv.md - - get-generic-python-lib: Python-automation\get-generic-python-lib.md - - get-python3: Python-automation\get-python3.md - - install-generic-conda-package: Python-automation\install-generic-conda-package.md - - install-python-src: Python-automation\install-python-src.md - - install-python-venv: Python-automation\install-python-venv.md + - activate-python-venv: Python-automation\activate-python-venv\index.md + - get-generic-python-lib: Python-automation\get-generic-python-lib\index.md + - get-python3: Python-automation\get-python3\index.md + - install-generic-conda-package: Python-automation\install-generic-conda-package\index.md + - install-python-src: Python-automation\install-python-src\index.md + - install-python-venv: 
Python-automation\install-python-venv\index.md - MLPerf benchmark support: - - add-custom-nvidia-system: MLPerf-benchmark-support\add-custom-nvidia-system.md - - benchmark-any-mlperf-inference-implementation: MLPerf-benchmark-support\benchmark-any-mlperf-inference-implementation.md - - build-mlperf-inference-server-nvidia: MLPerf-benchmark-support\build-mlperf-inference-server-nvidia.md - - generate-mlperf-inference-submission: MLPerf-benchmark-support\generate-mlperf-inference-submission.md - - generate-mlperf-inference-user-conf: MLPerf-benchmark-support\generate-mlperf-inference-user-conf.md - - generate-mlperf-tiny-report: MLPerf-benchmark-support\generate-mlperf-tiny-report.md - - generate-mlperf-tiny-submission: MLPerf-benchmark-support\generate-mlperf-tiny-submission.md - - generate-nvidia-engine: MLPerf-benchmark-support\generate-nvidia-engine.md - - get-mlperf-inference-intel-scratch-space: MLPerf-benchmark-support\get-mlperf-inference-intel-scratch-space.md - - get-mlperf-inference-loadgen: MLPerf-benchmark-support\get-mlperf-inference-loadgen.md - - get-mlperf-inference-nvidia-common-code: MLPerf-benchmark-support\get-mlperf-inference-nvidia-common-code.md - - get-mlperf-inference-nvidia-scratch-space: MLPerf-benchmark-support\get-mlperf-inference-nvidia-scratch-space.md - - get-mlperf-inference-results: MLPerf-benchmark-support\get-mlperf-inference-results.md - - get-mlperf-inference-results-dir: MLPerf-benchmark-support\get-mlperf-inference-results-dir.md - - get-mlperf-inference-src: MLPerf-benchmark-support\get-mlperf-inference-src.md - - get-mlperf-inference-submission-dir: MLPerf-benchmark-support\get-mlperf-inference-submission-dir.md - - get-mlperf-inference-sut-configs: MLPerf-benchmark-support\get-mlperf-inference-sut-configs.md - - get-mlperf-inference-sut-description: MLPerf-benchmark-support\get-mlperf-inference-sut-description.md - - get-mlperf-logging: MLPerf-benchmark-support\get-mlperf-logging.md - - get-mlperf-power-dev: 
MLPerf-benchmark-support\get-mlperf-power-dev.md - - get-mlperf-tiny-eembc-energy-runner-src: MLPerf-benchmark-support\get-mlperf-tiny-eembc-energy-runner-src.md - - get-mlperf-tiny-src: MLPerf-benchmark-support\get-mlperf-tiny-src.md - - get-mlperf-training-nvidia-code: MLPerf-benchmark-support\get-mlperf-training-nvidia-code.md - - get-mlperf-training-src: MLPerf-benchmark-support\get-mlperf-training-src.md - - get-nvidia-mitten: MLPerf-benchmark-support\get-nvidia-mitten.md - - get-spec-ptd: MLPerf-benchmark-support\get-spec-ptd.md - - import-mlperf-inference-to-experiment: MLPerf-benchmark-support\import-mlperf-inference-to-experiment.md - - import-mlperf-tiny-to-experiment: MLPerf-benchmark-support\import-mlperf-tiny-to-experiment.md - - import-mlperf-training-to-experiment: MLPerf-benchmark-support\import-mlperf-training-to-experiment.md - - install-mlperf-logging-from-src: MLPerf-benchmark-support\install-mlperf-logging-from-src.md - - prepare-training-data-bert: MLPerf-benchmark-support\prepare-training-data-bert.md - - prepare-training-data-resnet: MLPerf-benchmark-support\prepare-training-data-resnet.md - - preprocess-mlperf-inference-submission: MLPerf-benchmark-support\preprocess-mlperf-inference-submission.md - - process-mlperf-accuracy: MLPerf-benchmark-support\process-mlperf-accuracy.md - - push-mlperf-inference-results-to-github: MLPerf-benchmark-support\push-mlperf-inference-results-to-github.md - - run-all-mlperf-models: MLPerf-benchmark-support\run-all-mlperf-models.md - - run-mlperf-inference-mobilenet-models: MLPerf-benchmark-support\run-mlperf-inference-mobilenet-models.md - - run-mlperf-inference-submission-checker: MLPerf-benchmark-support\run-mlperf-inference-submission-checker.md - - run-mlperf-power-client: MLPerf-benchmark-support\run-mlperf-power-client.md - - run-mlperf-power-server: MLPerf-benchmark-support\run-mlperf-power-server.md - - run-mlperf-training-submission-checker: 
MLPerf-benchmark-support\run-mlperf-training-submission-checker.md - - truncate-mlperf-inference-accuracy-log: MLPerf-benchmark-support\truncate-mlperf-inference-accuracy-log.md + - add-custom-nvidia-system: MLPerf-benchmark-support\add-custom-nvidia-system\index.md + - benchmark-any-mlperf-inference-implementation: MLPerf-benchmark-support\benchmark-any-mlperf-inference-implementation\index.md + - build-mlperf-inference-server-nvidia: MLPerf-benchmark-support\build-mlperf-inference-server-nvidia\index.md + - generate-mlperf-inference-submission: MLPerf-benchmark-support\generate-mlperf-inference-submission\index.md + - generate-mlperf-inference-user-conf: MLPerf-benchmark-support\generate-mlperf-inference-user-conf\index.md + - generate-mlperf-tiny-report: MLPerf-benchmark-support\generate-mlperf-tiny-report\index.md + - generate-mlperf-tiny-submission: MLPerf-benchmark-support\generate-mlperf-tiny-submission\index.md + - generate-nvidia-engine: MLPerf-benchmark-support\generate-nvidia-engine\index.md + - get-mlperf-inference-intel-scratch-space: MLPerf-benchmark-support\get-mlperf-inference-intel-scratch-space\index.md + - get-mlperf-inference-loadgen: MLPerf-benchmark-support\get-mlperf-inference-loadgen\index.md + - get-mlperf-inference-nvidia-common-code: MLPerf-benchmark-support\get-mlperf-inference-nvidia-common-code\index.md + - get-mlperf-inference-nvidia-scratch-space: MLPerf-benchmark-support\get-mlperf-inference-nvidia-scratch-space\index.md + - get-mlperf-inference-results: MLPerf-benchmark-support\get-mlperf-inference-results\index.md + - get-mlperf-inference-results-dir: MLPerf-benchmark-support\get-mlperf-inference-results-dir\index.md + - get-mlperf-inference-src: MLPerf-benchmark-support\get-mlperf-inference-src\index.md + - get-mlperf-inference-submission-dir: MLPerf-benchmark-support\get-mlperf-inference-submission-dir\index.md + - get-mlperf-inference-sut-configs: MLPerf-benchmark-support\get-mlperf-inference-sut-configs\index.md + - 
get-mlperf-inference-sut-description: MLPerf-benchmark-support\get-mlperf-inference-sut-description\index.md + - get-mlperf-logging: MLPerf-benchmark-support\get-mlperf-logging\index.md + - get-mlperf-power-dev: MLPerf-benchmark-support\get-mlperf-power-dev\index.md + - get-mlperf-tiny-eembc-energy-runner-src: MLPerf-benchmark-support\get-mlperf-tiny-eembc-energy-runner-src\index.md + - get-mlperf-tiny-src: MLPerf-benchmark-support\get-mlperf-tiny-src\index.md + - get-mlperf-training-nvidia-code: MLPerf-benchmark-support\get-mlperf-training-nvidia-code\index.md + - get-mlperf-training-src: MLPerf-benchmark-support\get-mlperf-training-src\index.md + - get-nvidia-mitten: MLPerf-benchmark-support\get-nvidia-mitten\index.md + - get-spec-ptd: MLPerf-benchmark-support\get-spec-ptd\index.md + - import-mlperf-inference-to-experiment: MLPerf-benchmark-support\import-mlperf-inference-to-experiment\index.md + - import-mlperf-tiny-to-experiment: MLPerf-benchmark-support\import-mlperf-tiny-to-experiment\index.md + - import-mlperf-training-to-experiment: MLPerf-benchmark-support\import-mlperf-training-to-experiment\index.md + - install-mlperf-logging-from-src: MLPerf-benchmark-support\install-mlperf-logging-from-src\index.md + - prepare-training-data-bert: MLPerf-benchmark-support\prepare-training-data-bert\index.md + - prepare-training-data-resnet: MLPerf-benchmark-support\prepare-training-data-resnet\index.md + - preprocess-mlperf-inference-submission: MLPerf-benchmark-support\preprocess-mlperf-inference-submission\index.md + - process-mlperf-accuracy: MLPerf-benchmark-support\process-mlperf-accuracy\index.md + - push-mlperf-inference-results-to-github: MLPerf-benchmark-support\push-mlperf-inference-results-to-github\index.md + - run-all-mlperf-models: MLPerf-benchmark-support\run-all-mlperf-models\index.md + - run-mlperf-inference-mobilenet-models: MLPerf-benchmark-support\run-mlperf-inference-mobilenet-models\index.md + - run-mlperf-inference-submission-checker: 
MLPerf-benchmark-support\run-mlperf-inference-submission-checker\index.md + - run-mlperf-power-client: MLPerf-benchmark-support\run-mlperf-power-client\index.md + - run-mlperf-power-server: MLPerf-benchmark-support\run-mlperf-power-server\index.md + - run-mlperf-training-submission-checker: MLPerf-benchmark-support\run-mlperf-training-submission-checker\index.md + - truncate-mlperf-inference-accuracy-log: MLPerf-benchmark-support\truncate-mlperf-inference-accuracy-log\index.md - Modular AI-ML application pipeline: - - app-image-classification-onnx-py: Modular-AI-ML-application-pipeline\app-image-classification-onnx-py.md - - app-image-classification-tf-onnx-cpp: Modular-AI-ML-application-pipeline\app-image-classification-tf-onnx-cpp.md - - app-image-classification-torch-py: Modular-AI-ML-application-pipeline\app-image-classification-torch-py.md - - app-image-classification-tvm-onnx-py: Modular-AI-ML-application-pipeline\app-image-classification-tvm-onnx-py.md - - app-stable-diffusion-onnx-py: Modular-AI-ML-application-pipeline\app-stable-diffusion-onnx-py.md + - app-image-classification-onnx-py: Modular-AI-ML-application-pipeline\app-image-classification-onnx-py\index.md + - app-image-classification-tf-onnx-cpp: Modular-AI-ML-application-pipeline\app-image-classification-tf-onnx-cpp\index.md + - app-image-classification-torch-py: Modular-AI-ML-application-pipeline\app-image-classification-torch-py\index.md + - app-image-classification-tvm-onnx-py: Modular-AI-ML-application-pipeline\app-image-classification-tvm-onnx-py\index.md + - app-stable-diffusion-onnx-py: Modular-AI-ML-application-pipeline\app-stable-diffusion-onnx-py\index.md - Modular application pipeline: - - app-image-corner-detection: Modular-application-pipeline\app-image-corner-detection.md + - app-image-corner-detection: Modular-application-pipeline\app-image-corner-detection\index.md - Modular MLPerf inference benchmark pipeline: - - app-loadgen-generic-python: 
Modular-MLPerf-inference-benchmark-pipeline\app-loadgen-generic-python.md - - app-mlperf-inference: Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference.md - - app-mlperf-inference-ctuning-cpp-tflite: Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference-ctuning-cpp-tflite.md - - app-mlperf-inference-mlcommons-cpp: Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference-mlcommons-cpp.md - - app-mlperf-inference-mlcommons-python: Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference-mlcommons-python.md - - benchmark-program-mlperf: Modular-MLPerf-inference-benchmark-pipeline\benchmark-program-mlperf.md - - run-mlperf-inference-app: Modular-MLPerf-inference-benchmark-pipeline\run-mlperf-inference-app.md + - app-loadgen-generic-python: Modular-MLPerf-inference-benchmark-pipeline\app-loadgen-generic-python\index.md + - app-mlperf-inference: Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference\index.md + - app-mlperf-inference-ctuning-cpp-tflite: Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference-ctuning-cpp-tflite\index.md + - app-mlperf-inference-mlcommons-cpp: Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference-mlcommons-cpp\index.md + - app-mlperf-inference-mlcommons-python: Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference-mlcommons-python\index.md + - benchmark-program-mlperf: Modular-MLPerf-inference-benchmark-pipeline\benchmark-program-mlperf\index.md + - run-mlperf-inference-app: Modular-MLPerf-inference-benchmark-pipeline\run-mlperf-inference-app\index.md - Modular MLPerf benchmarks: - - app-mlperf-inference-dummy: Modular-MLPerf-benchmarks\app-mlperf-inference-dummy.md - - app-mlperf-inference-intel: Modular-MLPerf-benchmarks\app-mlperf-inference-intel.md - - app-mlperf-inference-qualcomm: Modular-MLPerf-benchmarks\app-mlperf-inference-qualcomm.md + - app-mlperf-inference-dummy: Modular-MLPerf-benchmarks\app-mlperf-inference-dummy\index.md + - 
app-mlperf-inference-intel: Modular-MLPerf-benchmarks\app-mlperf-inference-intel\index.md + - app-mlperf-inference-qualcomm: Modular-MLPerf-benchmarks\app-mlperf-inference-qualcomm\index.md - Reproduce MLPerf benchmarks: - - app-mlperf-inference-nvidia: Reproduce-MLPerf-benchmarks\app-mlperf-inference-nvidia.md - - reproduce-mlperf-octoml-tinyml-results: Reproduce-MLPerf-benchmarks\reproduce-mlperf-octoml-tinyml-results.md - - reproduce-mlperf-training-nvidia: Reproduce-MLPerf-benchmarks\reproduce-mlperf-training-nvidia.md - - wrapper-reproduce-octoml-tinyml-submission: Reproduce-MLPerf-benchmarks\wrapper-reproduce-octoml-tinyml-submission.md + - app-mlperf-inference-nvidia: Reproduce-MLPerf-benchmarks\app-mlperf-inference-nvidia\index.md + - reproduce-mlperf-octoml-tinyml-results: Reproduce-MLPerf-benchmarks\reproduce-mlperf-octoml-tinyml-results\index.md + - reproduce-mlperf-training-nvidia: Reproduce-MLPerf-benchmarks\reproduce-mlperf-training-nvidia\index.md + - wrapper-reproduce-octoml-tinyml-submission: Reproduce-MLPerf-benchmarks\wrapper-reproduce-octoml-tinyml-submission\index.md - Modular MLPerf training benchmark pipeline: - - app-mlperf-training-nvidia: Modular-MLPerf-training-benchmark-pipeline\app-mlperf-training-nvidia.md - - app-mlperf-training-reference: Modular-MLPerf-training-benchmark-pipeline\app-mlperf-training-reference.md + - app-mlperf-training-nvidia: Modular-MLPerf-training-benchmark-pipeline\app-mlperf-training-nvidia\index.md + - app-mlperf-training-reference: Modular-MLPerf-training-benchmark-pipeline\app-mlperf-training-reference\index.md - DevOps automation: - - benchmark-program: DevOps-automation\benchmark-program.md - - compile-program: DevOps-automation\compile-program.md - - convert-csv-to-md: DevOps-automation\convert-csv-to-md.md - - copy-to-clipboard: DevOps-automation\copy-to-clipboard.md - - create-conda-env: DevOps-automation\create-conda-env.md - - create-patch: DevOps-automation\create-patch.md - - detect-sudo: 
DevOps-automation\detect-sudo.md - - download-and-extract: DevOps-automation\download-and-extract.md - - download-file: DevOps-automation\download-file.md - - download-torrent: DevOps-automation\download-torrent.md - - extract-file: DevOps-automation\extract-file.md - - fail: DevOps-automation\fail.md - - get-conda: DevOps-automation\get-conda.md - - get-git-repo: DevOps-automation\get-git-repo.md - - get-github-cli: DevOps-automation\get-github-cli.md - - pull-git-repo: DevOps-automation\pull-git-repo.md - - push-csv-to-spreadsheet: DevOps-automation\push-csv-to-spreadsheet.md - - set-device-settings-qaic: DevOps-automation\set-device-settings-qaic.md - - set-echo-off-win: DevOps-automation\set-echo-off-win.md - - set-performance-mode: DevOps-automation\set-performance-mode.md - - set-sqlite-dir: DevOps-automation\set-sqlite-dir.md - - tar-my-folder: DevOps-automation\tar-my-folder.md + - benchmark-program: DevOps-automation\benchmark-program\index.md + - compile-program: DevOps-automation\compile-program\index.md + - convert-csv-to-md: DevOps-automation\convert-csv-to-md\index.md + - copy-to-clipboard: DevOps-automation\copy-to-clipboard\index.md + - create-conda-env: DevOps-automation\create-conda-env\index.md + - create-patch: DevOps-automation\create-patch\index.md + - detect-sudo: DevOps-automation\detect-sudo\index.md + - download-and-extract: DevOps-automation\download-and-extract\index.md + - download-file: DevOps-automation\download-file\index.md + - download-torrent: DevOps-automation\download-torrent\index.md + - extract-file: DevOps-automation\extract-file\index.md + - fail: DevOps-automation\fail\index.md + - get-conda: DevOps-automation\get-conda\index.md + - get-git-repo: DevOps-automation\get-git-repo\index.md + - get-github-cli: DevOps-automation\get-github-cli\index.md + - pull-git-repo: DevOps-automation\pull-git-repo\index.md + - push-csv-to-spreadsheet: DevOps-automation\push-csv-to-spreadsheet\index.md + - set-device-settings-qaic: 
DevOps-automation\set-device-settings-qaic\index.md + - set-echo-off-win: DevOps-automation\set-echo-off-win\index.md + - set-performance-mode: DevOps-automation\set-performance-mode\index.md + - set-sqlite-dir: DevOps-automation\set-sqlite-dir\index.md + - tar-my-folder: DevOps-automation\tar-my-folder\index.md - Docker automation: - - build-docker-image: Docker-automation\build-docker-image.md - - build-dockerfile: Docker-automation\build-dockerfile.md - - prune-docker: Docker-automation\prune-docker.md - - run-docker-container: Docker-automation\run-docker-container.md + - build-docker-image: Docker-automation\build-docker-image\index.md + - build-dockerfile: Docker-automation\build-dockerfile\index.md + - prune-docker: Docker-automation\prune-docker\index.md + - run-docker-container: Docker-automation\run-docker-container\index.md - AI-ML optimization: - - calibrate-model-for.qaic: AI-ML-optimization\calibrate-model-for.qaic.md - - compile-model-for.qaic: AI-ML-optimization\compile-model-for.qaic.md - - prune-bert-models: AI-ML-optimization\prune-bert-models.md + - calibrate-model-for.qaic: AI-ML-optimization\calibrate-model-for.qaic\index.md + - compile-model-for.qaic: AI-ML-optimization\compile-model-for.qaic\index.md + - prune-bert-models: AI-ML-optimization\prune-bert-models\index.md - AI-ML models: - - convert-ml-model-huggingface-to-onnx: AI-ML-models\convert-ml-model-huggingface-to-onnx.md - - get-bert-squad-vocab: AI-ML-models\get-bert-squad-vocab.md - - get-dlrm: AI-ML-models\get-dlrm.md - - get-ml-model-3d-unet-kits19: AI-ML-models\get-ml-model-3d-unet-kits19.md - - get-ml-model-bert-base-squad: AI-ML-models\get-ml-model-bert-base-squad.md - - get-ml-model-bert-large-squad: AI-ML-models\get-ml-model-bert-large-squad.md - - get-ml-model-dlrm-terabyte: AI-ML-models\get-ml-model-dlrm-terabyte.md - - get-ml-model-efficientnet-lite: AI-ML-models\get-ml-model-efficientnet-lite.md - - get-ml-model-gptj: AI-ML-models\get-ml-model-gptj.md - - 
get-ml-model-huggingface-zoo: AI-ML-models\get-ml-model-huggingface-zoo.md - - get-ml-model-llama2: AI-ML-models\get-ml-model-llama2.md - - get-ml-model-mobilenet: AI-ML-models\get-ml-model-mobilenet.md - - get-ml-model-neuralmagic-zoo: AI-ML-models\get-ml-model-neuralmagic-zoo.md - - get-ml-model-resnet50: AI-ML-models\get-ml-model-resnet50.md - - get-ml-model-retinanet: AI-ML-models\get-ml-model-retinanet.md - - get-ml-model-retinanet-nvidia: AI-ML-models\get-ml-model-retinanet-nvidia.md - - get-ml-model-rnnt: AI-ML-models\get-ml-model-rnnt.md - - get-ml-model-stable-diffusion: AI-ML-models\get-ml-model-stable-diffusion.md - - get-ml-model-tiny-resnet: AI-ML-models\get-ml-model-tiny-resnet.md - - get-ml-model-using-imagenet-from-model-zoo: AI-ML-models\get-ml-model-using-imagenet-from-model-zoo.md - - get-tvm-model: AI-ML-models\get-tvm-model.md + - convert-ml-model-huggingface-to-onnx: AI-ML-models\convert-ml-model-huggingface-to-onnx\index.md + - get-bert-squad-vocab: AI-ML-models\get-bert-squad-vocab\index.md + - get-dlrm: AI-ML-models\get-dlrm\index.md + - get-ml-model-3d-unet-kits19: AI-ML-models\get-ml-model-3d-unet-kits19\index.md + - get-ml-model-bert-base-squad: AI-ML-models\get-ml-model-bert-base-squad\index.md + - get-ml-model-bert-large-squad: AI-ML-models\get-ml-model-bert-large-squad\index.md + - get-ml-model-dlrm-terabyte: AI-ML-models\get-ml-model-dlrm-terabyte\index.md + - get-ml-model-efficientnet-lite: AI-ML-models\get-ml-model-efficientnet-lite\index.md + - get-ml-model-gptj: AI-ML-models\get-ml-model-gptj\index.md + - get-ml-model-huggingface-zoo: AI-ML-models\get-ml-model-huggingface-zoo\index.md + - get-ml-model-llama2: AI-ML-models\get-ml-model-llama2\index.md + - get-ml-model-mobilenet: AI-ML-models\get-ml-model-mobilenet\index.md + - get-ml-model-neuralmagic-zoo: AI-ML-models\get-ml-model-neuralmagic-zoo\index.md + - get-ml-model-resnet50: AI-ML-models\get-ml-model-resnet50\index.md + - get-ml-model-retinanet: 
AI-ML-models\get-ml-model-retinanet\index.md + - get-ml-model-retinanet-nvidia: AI-ML-models\get-ml-model-retinanet-nvidia\index.md + - get-ml-model-rnnt: AI-ML-models\get-ml-model-rnnt\index.md + - get-ml-model-stable-diffusion: AI-ML-models\get-ml-model-stable-diffusion\index.md + - get-ml-model-tiny-resnet: AI-ML-models\get-ml-model-tiny-resnet\index.md + - get-ml-model-using-imagenet-from-model-zoo: AI-ML-models\get-ml-model-using-imagenet-from-model-zoo\index.md + - get-tvm-model: AI-ML-models\get-tvm-model\index.md - CM automation: - - create-custom-cache-entry: CM-automation\create-custom-cache-entry.md + - create-custom-cache-entry: CM-automation\create-custom-cache-entry\index.md - TinyML automation: - - create-fpgaconvnet-app-tinyml: TinyML-automation\create-fpgaconvnet-app-tinyml.md - - create-fpgaconvnet-config-tinyml: TinyML-automation\create-fpgaconvnet-config-tinyml.md - - flash-tinyml-binary: TinyML-automation\flash-tinyml-binary.md - - get-microtvm: TinyML-automation\get-microtvm.md - - get-zephyr: TinyML-automation\get-zephyr.md - - get-zephyr-sdk: TinyML-automation\get-zephyr-sdk.md + - create-fpgaconvnet-app-tinyml: TinyML-automation\create-fpgaconvnet-app-tinyml\index.md + - create-fpgaconvnet-config-tinyml: TinyML-automation\create-fpgaconvnet-config-tinyml\index.md + - flash-tinyml-binary: TinyML-automation\flash-tinyml-binary\index.md + - get-microtvm: TinyML-automation\get-microtvm\index.md + - get-zephyr: TinyML-automation\get-zephyr\index.md + - get-zephyr-sdk: TinyML-automation\get-zephyr-sdk\index.md - Cloud automation: - - destroy-terraform: Cloud-automation\destroy-terraform.md - - get-aws-cli: Cloud-automation\get-aws-cli.md - - get-terraform: Cloud-automation\get-terraform.md - - install-aws-cli: Cloud-automation\install-aws-cli.md - - install-terraform-from-src: Cloud-automation\install-terraform-from-src.md - - run-terraform: Cloud-automation\run-terraform.md + - destroy-terraform: Cloud-automation\destroy-terraform\index.md + - 
get-aws-cli: Cloud-automation\get-aws-cli\index.md + - get-terraform: Cloud-automation\get-terraform\index.md + - install-aws-cli: Cloud-automation\install-aws-cli\index.md + - install-terraform-from-src: Cloud-automation\install-terraform-from-src\index.md + - run-terraform: Cloud-automation\run-terraform\index.md - Platform information: - - detect-cpu: Platform-information\detect-cpu.md - - detect-os: Platform-information\detect-os.md + - detect-cpu: Platform-information\detect-cpu\index.md + - detect-os: Platform-information\detect-os\index.md - Detection or installation of tools and artifacts: - - get-android-sdk: Detection-or-installation-of-tools-and-artifacts\get-android-sdk.md - - get-aria2: Detection-or-installation-of-tools-and-artifacts\get-aria2.md - - get-bazel: Detection-or-installation-of-tools-and-artifacts\get-bazel.md - - get-blis: Detection-or-installation-of-tools-and-artifacts\get-blis.md - - get-brew: Detection-or-installation-of-tools-and-artifacts\get-brew.md - - get-cmake: Detection-or-installation-of-tools-and-artifacts\get-cmake.md - - get-cmsis_5: Detection-or-installation-of-tools-and-artifacts\get-cmsis_5.md - - get-docker: Detection-or-installation-of-tools-and-artifacts\get-docker.md - - get-generic-sys-util: Detection-or-installation-of-tools-and-artifacts\get-generic-sys-util.md - - get-google-test: Detection-or-installation-of-tools-and-artifacts\get-google-test.md - - get-java: Detection-or-installation-of-tools-and-artifacts\get-java.md - - get-javac: Detection-or-installation-of-tools-and-artifacts\get-javac.md - - get-lib-armnn: Detection-or-installation-of-tools-and-artifacts\get-lib-armnn.md - - get-lib-dnnl: Detection-or-installation-of-tools-and-artifacts\get-lib-dnnl.md - - get-lib-protobuf: Detection-or-installation-of-tools-and-artifacts\get-lib-protobuf.md - - get-lib-qaic-api: Detection-or-installation-of-tools-and-artifacts\get-lib-qaic-api.md - - get-nvidia-docker: 
Detection-or-installation-of-tools-and-artifacts\get-nvidia-docker.md - - get-openssl: Detection-or-installation-of-tools-and-artifacts\get-openssl.md - - get-rclone: Detection-or-installation-of-tools-and-artifacts\get-rclone.md - - get-sys-utils-cm: Detection-or-installation-of-tools-and-artifacts\get-sys-utils-cm.md - - get-sys-utils-min: Detection-or-installation-of-tools-and-artifacts\get-sys-utils-min.md - - get-xilinx-sdk: Detection-or-installation-of-tools-and-artifacts\get-xilinx-sdk.md - - get-zendnn: Detection-or-installation-of-tools-and-artifacts\get-zendnn.md - - install-bazel: Detection-or-installation-of-tools-and-artifacts\install-bazel.md - - install-cmake-prebuilt: Detection-or-installation-of-tools-and-artifacts\install-cmake-prebuilt.md - - install-gflags: Detection-or-installation-of-tools-and-artifacts\install-gflags.md - - install-github-cli: Detection-or-installation-of-tools-and-artifacts\install-github-cli.md - - install-numactl-from-src: Detection-or-installation-of-tools-and-artifacts\install-numactl-from-src.md - - install-openssl: Detection-or-installation-of-tools-and-artifacts\install-openssl.md + - get-android-sdk: Detection-or-installation-of-tools-and-artifacts\get-android-sdk\index.md + - get-aria2: Detection-or-installation-of-tools-and-artifacts\get-aria2\index.md + - get-bazel: Detection-or-installation-of-tools-and-artifacts\get-bazel\index.md + - get-blis: Detection-or-installation-of-tools-and-artifacts\get-blis\index.md + - get-brew: Detection-or-installation-of-tools-and-artifacts\get-brew\index.md + - get-cmake: Detection-or-installation-of-tools-and-artifacts\get-cmake\index.md + - get-cmsis_5: Detection-or-installation-of-tools-and-artifacts\get-cmsis_5\index.md + - get-docker: Detection-or-installation-of-tools-and-artifacts\get-docker\index.md + - get-generic-sys-util: Detection-or-installation-of-tools-and-artifacts\get-generic-sys-util\index.md + - get-google-test: 
Detection-or-installation-of-tools-and-artifacts\get-google-test\index.md + - get-java: Detection-or-installation-of-tools-and-artifacts\get-java\index.md + - get-javac: Detection-or-installation-of-tools-and-artifacts\get-javac\index.md + - get-lib-armnn: Detection-or-installation-of-tools-and-artifacts\get-lib-armnn\index.md + - get-lib-dnnl: Detection-or-installation-of-tools-and-artifacts\get-lib-dnnl\index.md + - get-lib-protobuf: Detection-or-installation-of-tools-and-artifacts\get-lib-protobuf\index.md + - get-lib-qaic-api: Detection-or-installation-of-tools-and-artifacts\get-lib-qaic-api\index.md + - get-nvidia-docker: Detection-or-installation-of-tools-and-artifacts\get-nvidia-docker\index.md + - get-openssl: Detection-or-installation-of-tools-and-artifacts\get-openssl\index.md + - get-rclone: Detection-or-installation-of-tools-and-artifacts\get-rclone\index.md + - get-sys-utils-cm: Detection-or-installation-of-tools-and-artifacts\get-sys-utils-cm\index.md + - get-sys-utils-min: Detection-or-installation-of-tools-and-artifacts\get-sys-utils-min\index.md + - get-xilinx-sdk: Detection-or-installation-of-tools-and-artifacts\get-xilinx-sdk\index.md + - get-zendnn: Detection-or-installation-of-tools-and-artifacts\get-zendnn\index.md + - install-bazel: Detection-or-installation-of-tools-and-artifacts\install-bazel\index.md + - install-cmake-prebuilt: Detection-or-installation-of-tools-and-artifacts\install-cmake-prebuilt\index.md + - install-gflags: Detection-or-installation-of-tools-and-artifacts\install-gflags\index.md + - install-github-cli: Detection-or-installation-of-tools-and-artifacts\install-github-cli\index.md + - install-numactl-from-src: Detection-or-installation-of-tools-and-artifacts\install-numactl-from-src\index.md + - install-openssl: Detection-or-installation-of-tools-and-artifacts\install-openssl\index.md - Compiler automation: - - get-aocl: Compiler-automation\get-aocl.md - - get-cl: Compiler-automation\get-cl.md - - get-compiler-flags: 
Compiler-automation\get-compiler-flags.md - - get-compiler-rust: Compiler-automation\get-compiler-rust.md - - get-gcc: Compiler-automation\get-gcc.md - - get-go: Compiler-automation\get-go.md - - get-llvm: Compiler-automation\get-llvm.md - - install-gcc-src: Compiler-automation\install-gcc-src.md - - install-ipex-from-src: Compiler-automation\install-ipex-from-src.md - - install-llvm-prebuilt: Compiler-automation\install-llvm-prebuilt.md - - install-llvm-src: Compiler-automation\install-llvm-src.md - - install-onednn-from-src: Compiler-automation\install-onednn-from-src.md - - install-onnxruntime-from-src: Compiler-automation\install-onnxruntime-from-src.md - - install-pytorch-from-src: Compiler-automation\install-pytorch-from-src.md - - install-pytorch-kineto-from-src: Compiler-automation\install-pytorch-kineto-from-src.md - - install-torchvision-from-src: Compiler-automation\install-torchvision-from-src.md - - install-tpp-pytorch-extension: Compiler-automation\install-tpp-pytorch-extension.md - - install-transformers-from-src: Compiler-automation\install-transformers-from-src.md + - get-aocl: Compiler-automation\get-aocl\index.md + - get-cl: Compiler-automation\get-cl\index.md + - get-compiler-flags: Compiler-automation\get-compiler-flags\index.md + - get-compiler-rust: Compiler-automation\get-compiler-rust\index.md + - get-gcc: Compiler-automation\get-gcc\index.md + - get-go: Compiler-automation\get-go\index.md + - get-llvm: Compiler-automation\get-llvm\index.md + - install-gcc-src: Compiler-automation\install-gcc-src\index.md + - install-ipex-from-src: Compiler-automation\install-ipex-from-src\index.md + - install-llvm-prebuilt: Compiler-automation\install-llvm-prebuilt\index.md + - install-llvm-src: Compiler-automation\install-llvm-src\index.md + - install-onednn-from-src: Compiler-automation\install-onednn-from-src\index.md + - install-onnxruntime-from-src: Compiler-automation\install-onnxruntime-from-src\index.md + - install-pytorch-from-src: 
Compiler-automation\install-pytorch-from-src\index.md + - install-pytorch-kineto-from-src: Compiler-automation\install-pytorch-kineto-from-src\index.md + - install-torchvision-from-src: Compiler-automation\install-torchvision-from-src\index.md + - install-tpp-pytorch-extension: Compiler-automation\install-tpp-pytorch-extension\index.md + - install-transformers-from-src: Compiler-automation\install-transformers-from-src\index.md - CM Interface: - - get-cache-dir: CM-Interface\get-cache-dir.md + - get-cache-dir: CM-Interface\get-cache-dir\index.md - Legacy CK support: - - get-ck: Legacy-CK-support\get-ck.md - - get-ck-repo-mlops: Legacy-CK-support\get-ck-repo-mlops.md + - get-ck: Legacy-CK-support\get-ck\index.md + - get-ck-repo-mlops: Legacy-CK-support\get-ck-repo-mlops\index.md - AI-ML datasets: - - get-croissant: AI-ML-datasets\get-croissant.md - - get-dataset-cifar10: AI-ML-datasets\get-dataset-cifar10.md - - get-dataset-cnndm: AI-ML-datasets\get-dataset-cnndm.md - - get-dataset-coco: AI-ML-datasets\get-dataset-coco.md - - get-dataset-coco2014: AI-ML-datasets\get-dataset-coco2014.md - - get-dataset-criteo: AI-ML-datasets\get-dataset-criteo.md - - get-dataset-imagenet-aux: AI-ML-datasets\get-dataset-imagenet-aux.md - - get-dataset-imagenet-calibration: AI-ML-datasets\get-dataset-imagenet-calibration.md - - get-dataset-imagenet-helper: AI-ML-datasets\get-dataset-imagenet-helper.md - - get-dataset-imagenet-train: AI-ML-datasets\get-dataset-imagenet-train.md - - get-dataset-imagenet-val: AI-ML-datasets\get-dataset-imagenet-val.md - - get-dataset-kits19: AI-ML-datasets\get-dataset-kits19.md - - get-dataset-librispeech: AI-ML-datasets\get-dataset-librispeech.md - - get-dataset-openimages: AI-ML-datasets\get-dataset-openimages.md - - get-dataset-openimages-annotations: AI-ML-datasets\get-dataset-openimages-annotations.md - - get-dataset-openimages-calibration: AI-ML-datasets\get-dataset-openimages-calibration.md - - get-dataset-openorca: 
AI-ML-datasets\get-dataset-openorca.md - - get-dataset-squad: AI-ML-datasets\get-dataset-squad.md - - get-dataset-squad-vocab: AI-ML-datasets\get-dataset-squad-vocab.md - - get-preprocessed-dataset-criteo: AI-ML-datasets\get-preprocessed-dataset-criteo.md - - get-preprocessed-dataset-generic: AI-ML-datasets\get-preprocessed-dataset-generic.md - - get-preprocessed-dataset-imagenet: AI-ML-datasets\get-preprocessed-dataset-imagenet.md - - get-preprocessed-dataset-kits19: AI-ML-datasets\get-preprocessed-dataset-kits19.md - - get-preprocessed-dataset-librispeech: AI-ML-datasets\get-preprocessed-dataset-librispeech.md - - get-preprocessed-dataset-openimages: AI-ML-datasets\get-preprocessed-dataset-openimages.md - - get-preprocessed-dataset-openorca: AI-ML-datasets\get-preprocessed-dataset-openorca.md - - get-preprocessed-dataset-squad: AI-ML-datasets\get-preprocessed-dataset-squad.md + - get-croissant: AI-ML-datasets\get-croissant\index.md + - get-dataset-cifar10: AI-ML-datasets\get-dataset-cifar10\index.md + - get-dataset-cnndm: AI-ML-datasets\get-dataset-cnndm\index.md + - get-dataset-coco: AI-ML-datasets\get-dataset-coco\index.md + - get-dataset-coco2014: AI-ML-datasets\get-dataset-coco2014\index.md + - get-dataset-criteo: AI-ML-datasets\get-dataset-criteo\index.md + - get-dataset-imagenet-aux: AI-ML-datasets\get-dataset-imagenet-aux\index.md + - get-dataset-imagenet-calibration: AI-ML-datasets\get-dataset-imagenet-calibration\index.md + - get-dataset-imagenet-helper: AI-ML-datasets\get-dataset-imagenet-helper\index.md + - get-dataset-imagenet-train: AI-ML-datasets\get-dataset-imagenet-train\index.md + - get-dataset-imagenet-val: AI-ML-datasets\get-dataset-imagenet-val\index.md + - get-dataset-kits19: AI-ML-datasets\get-dataset-kits19\index.md + - get-dataset-librispeech: AI-ML-datasets\get-dataset-librispeech\index.md + - get-dataset-openimages: AI-ML-datasets\get-dataset-openimages\index.md + - get-dataset-openimages-annotations: 
AI-ML-datasets\get-dataset-openimages-annotations\index.md + - get-dataset-openimages-calibration: AI-ML-datasets\get-dataset-openimages-calibration\index.md + - get-dataset-openorca: AI-ML-datasets\get-dataset-openorca\index.md + - get-dataset-squad: AI-ML-datasets\get-dataset-squad\index.md + - get-dataset-squad-vocab: AI-ML-datasets\get-dataset-squad-vocab\index.md + - get-preprocessed-dataset-criteo: AI-ML-datasets\get-preprocessed-dataset-criteo\index.md + - get-preprocessed-dataset-generic: AI-ML-datasets\get-preprocessed-dataset-generic\index.md + - get-preprocessed-dataset-imagenet: AI-ML-datasets\get-preprocessed-dataset-imagenet\index.md + - get-preprocessed-dataset-kits19: AI-ML-datasets\get-preprocessed-dataset-kits19\index.md + - get-preprocessed-dataset-librispeech: AI-ML-datasets\get-preprocessed-dataset-librispeech\index.md + - get-preprocessed-dataset-openimages: AI-ML-datasets\get-preprocessed-dataset-openimages\index.md + - get-preprocessed-dataset-openorca: AI-ML-datasets\get-preprocessed-dataset-openorca\index.md + - get-preprocessed-dataset-squad: AI-ML-datasets\get-preprocessed-dataset-squad\index.md - CUDA automation: - - get-cuda: CUDA-automation\get-cuda.md - - get-cuda-devices: CUDA-automation\get-cuda-devices.md - - get-cudnn: CUDA-automation\get-cudnn.md - - get-tensorrt: CUDA-automation\get-tensorrt.md - - install-cuda-package-manager: CUDA-automation\install-cuda-package-manager.md - - install-cuda-prebuilt: CUDA-automation\install-cuda-prebuilt.md + - get-cuda: CUDA-automation\get-cuda\index.md + - get-cuda-devices: CUDA-automation\get-cuda-devices\index.md + - get-cudnn: CUDA-automation\get-cudnn\index.md + - get-tensorrt: CUDA-automation\get-tensorrt\index.md + - install-cuda-package-manager: CUDA-automation\install-cuda-package-manager\index.md + - install-cuda-prebuilt: CUDA-automation\install-cuda-prebuilt\index.md - AI-ML frameworks: - - get-google-saxml: AI-ML-frameworks\get-google-saxml.md - - get-onnxruntime-prebuilt: 
AI-ML-frameworks\get-onnxruntime-prebuilt.md - - get-qaic-apps-sdk: AI-ML-frameworks\get-qaic-apps-sdk.md - - get-qaic-platform-sdk: AI-ML-frameworks\get-qaic-platform-sdk.md - - get-qaic-software-kit: AI-ML-frameworks\get-qaic-software-kit.md - - get-rocm: AI-ML-frameworks\get-rocm.md - - get-tvm: AI-ML-frameworks\get-tvm.md - - install-qaic-compute-sdk-from-src: AI-ML-frameworks\install-qaic-compute-sdk-from-src.md - - install-rocm: AI-ML-frameworks\install-rocm.md - - install-tensorflow-for-c: AI-ML-frameworks\install-tensorflow-for-c.md - - install-tensorflow-from-src: AI-ML-frameworks\install-tensorflow-from-src.md - - install-tflite-from-src: AI-ML-frameworks\install-tflite-from-src.md + - get-google-saxml: AI-ML-frameworks\get-google-saxml\index.md + - get-onnxruntime-prebuilt: AI-ML-frameworks\get-onnxruntime-prebuilt\index.md + - get-qaic-apps-sdk: AI-ML-frameworks\get-qaic-apps-sdk\index.md + - get-qaic-platform-sdk: AI-ML-frameworks\get-qaic-platform-sdk\index.md + - get-qaic-software-kit: AI-ML-frameworks\get-qaic-software-kit\index.md + - get-rocm: AI-ML-frameworks\get-rocm\index.md + - get-tvm: AI-ML-frameworks\get-tvm\index.md + - install-qaic-compute-sdk-from-src: AI-ML-frameworks\install-qaic-compute-sdk-from-src\index.md + - install-rocm: AI-ML-frameworks\install-rocm\index.md + - install-tensorflow-for-c: AI-ML-frameworks\install-tensorflow-for-c\index.md + - install-tensorflow-from-src: AI-ML-frameworks\install-tensorflow-from-src\index.md + - install-tflite-from-src: AI-ML-frameworks\install-tflite-from-src\index.md - Reproducibility and artifact evaluation: - - get-ipol-src: Reproducibility-and-artifact-evaluation\get-ipol-src.md - - process-ae-users: Reproducibility-and-artifact-evaluation\process-ae-users.md - - reproduce-ipol-paper-2022-439: Reproducibility-and-artifact-evaluation\reproduce-ipol-paper-2022-439.md - - reproduce-micro-paper-2023-victima: Reproducibility-and-artifact-evaluation\reproduce-micro-paper-2023-victima.md + - 
get-ipol-src: Reproducibility-and-artifact-evaluation\get-ipol-src\index.md + - process-ae-users: Reproducibility-and-artifact-evaluation\process-ae-users\index.md + - reproduce-ipol-paper-2022-439: Reproducibility-and-artifact-evaluation\reproduce-ipol-paper-2022-439\index.md + - reproduce-micro-paper-2023-victima: Reproducibility-and-artifact-evaluation\reproduce-micro-paper-2023-victima\index.md - GUI: - - gui: GUI\gui.md + - gui: GUI\gui\index.md - Collective benchmarking: - - launch-benchmark: Collective-benchmarking\launch-benchmark.md + - launch-benchmark: Collective-benchmarking\launch-benchmark\index.md - Tests: - - print-any-text: Tests\print-any-text.md - - print-croissant-desc: Tests\print-croissant-desc.md - - print-hello-world: Tests\print-hello-world.md - - print-hello-world-java: Tests\print-hello-world-java.md - - print-hello-world-javac: Tests\print-hello-world-javac.md - - print-hello-world-py: Tests\print-hello-world-py.md - - print-python-version: Tests\print-python-version.md - - run-python: Tests\run-python.md - - test-cm-core: Tests\test-cm-core.md - - test-cm-script-pipeline: Tests\test-cm-script-pipeline.md - - test-deps-conditions: Tests\test-deps-conditions.md - - test-deps-conditions2: Tests\test-deps-conditions2.md - - test-download-and-extract-artifacts: Tests\test-download-and-extract-artifacts.md - - test-set-sys-user-cm: Tests\test-set-sys-user-cm.md - - upgrade-python-pip: Tests\upgrade-python-pip.md + - print-any-text: Tests\print-any-text\index.md + - print-croissant-desc: Tests\print-croissant-desc\index.md + - print-hello-world: Tests\print-hello-world\index.md + - print-hello-world-java: Tests\print-hello-world-java\index.md + - print-hello-world-javac: Tests\print-hello-world-javac\index.md + - print-hello-world-py: Tests\print-hello-world-py\index.md + - print-python-version: Tests\print-python-version\index.md + - run-python: Tests\run-python\index.md + - test-cm-core: Tests\test-cm-core\index.md + - 
test-cm-script-pipeline: Tests\test-cm-script-pipeline\index.md + - test-deps-conditions: Tests\test-deps-conditions\index.md + - test-deps-conditions2: Tests\test-deps-conditions2\index.md + - test-download-and-extract-artifacts: Tests\test-download-and-extract-artifacts\index.md + - test-set-sys-user-cm: Tests\test-set-sys-user-cm\index.md + - upgrade-python-pip: Tests\upgrade-python-pip\index.md - Dashboard automation: - - publish-results-to-dashboard: Dashboard-automation\publish-results-to-dashboard.md + - publish-results-to-dashboard: Dashboard-automation\publish-results-to-dashboard\index.md - Remote automation: - - remote-run-commands: Remote-automation\remote-run-commands.md + - remote-run-commands: Remote-automation\remote-run-commands\index.md - CM interface prototyping: - - test-debug: CM-interface-prototyping\test-debug.md - - test-mlperf-inference-retinanet: CM-interface-prototyping\test-mlperf-inference-retinanet.md + - test-debug: CM-interface-prototyping\test-debug\index.md + - test-mlperf-inference-retinanet: CM-interface-prototyping\test-mlperf-inference-retinanet\index.md markdown_extensions: - pymdownx.tasklist: diff --git a/mkdocsHelper.py b/mkdocsHelper.py index a797bcf7d..489acb959 100644 --- a/mkdocsHelper.py +++ b/mkdocsHelper.py @@ -52,7 +52,7 @@ def print_category_structure(category_dict): folder_name = folder.replace("/", "-") source_path_folder = os.path.join("script", folder_name) source_file_path = os.path.join(source_path_folder, "README.md") - target_path = os.path.join(category_path_formated, folder_name + ".md") + target_path = os.path.join(category_path_formated, os.path.join(folder_name, "index.md")) if not os.path.exists(source_file_path): # print(f"Source file does not exist: {source_file_path}") continue @@ -78,7 +78,7 @@ def print_category_structure(category_dict): print(f" - {category.replace("/", "-")}:") for folder in folders: folder_name = folder.replace("/", "-") - target_path = os.path.join(category_path_formated, 
folder_name + ".md") + target_path = os.path.join(category_path_formated, os.path.join(folder_name, "index.md")) print(f" - {folder_name}: {target_path}") if __name__ == "__main__": From 4c7de3f532079b6e945f30aafb299d9bc30f78ee Mon Sep 17 00:00:00 2001 From: ANANDHU S <71482562+anandhu-eng@users.noreply.github.com> Date: Sat, 25 May 2024 00:13:13 +0530 Subject: [PATCH 6/7] values formatted for gh-pages --- mkdocs.yml | 510 ++++++++++++++++++++++++++--------------------------- 1 file changed, 255 insertions(+), 255 deletions(-) diff --git a/mkdocs.yml b/mkdocs.yml index da67a78d5..cea23b114 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -22,290 +22,290 @@ nav: - CM Scripts: - index.md - Python automation: - - activate-python-venv: Python-automation\activate-python-venv\index.md - - get-generic-python-lib: Python-automation\get-generic-python-lib\index.md - - get-python3: Python-automation\get-python3\index.md - - install-generic-conda-package: Python-automation\install-generic-conda-package\index.md - - install-python-src: Python-automation\install-python-src\index.md - - install-python-venv: Python-automation\install-python-venv\index.md + - activate-python-venv: Python-automation/activate-python-venv/index.md + - get-generic-python-lib: Python-automation/get-generic-python-lib/index.md + - get-python3: Python-automation/get-python3/index.md + - install-generic-conda-package: Python-automation/install-generic-conda-package/index.md + - install-python-src: Python-automation/install-python-src/index.md + - install-python-venv: Python-automation/install-python-venv/index.md - MLPerf benchmark support: - - add-custom-nvidia-system: MLPerf-benchmark-support\add-custom-nvidia-system\index.md - - benchmark-any-mlperf-inference-implementation: MLPerf-benchmark-support\benchmark-any-mlperf-inference-implementation\index.md - - build-mlperf-inference-server-nvidia: MLPerf-benchmark-support\build-mlperf-inference-server-nvidia\index.md - - generate-mlperf-inference-submission: 
MLPerf-benchmark-support\generate-mlperf-inference-submission\index.md - - generate-mlperf-inference-user-conf: MLPerf-benchmark-support\generate-mlperf-inference-user-conf\index.md - - generate-mlperf-tiny-report: MLPerf-benchmark-support\generate-mlperf-tiny-report\index.md - - generate-mlperf-tiny-submission: MLPerf-benchmark-support\generate-mlperf-tiny-submission\index.md - - generate-nvidia-engine: MLPerf-benchmark-support\generate-nvidia-engine\index.md - - get-mlperf-inference-intel-scratch-space: MLPerf-benchmark-support\get-mlperf-inference-intel-scratch-space\index.md - - get-mlperf-inference-loadgen: MLPerf-benchmark-support\get-mlperf-inference-loadgen\index.md - - get-mlperf-inference-nvidia-common-code: MLPerf-benchmark-support\get-mlperf-inference-nvidia-common-code\index.md - - get-mlperf-inference-nvidia-scratch-space: MLPerf-benchmark-support\get-mlperf-inference-nvidia-scratch-space\index.md - - get-mlperf-inference-results: MLPerf-benchmark-support\get-mlperf-inference-results\index.md - - get-mlperf-inference-results-dir: MLPerf-benchmark-support\get-mlperf-inference-results-dir\index.md - - get-mlperf-inference-src: MLPerf-benchmark-support\get-mlperf-inference-src\index.md - - get-mlperf-inference-submission-dir: MLPerf-benchmark-support\get-mlperf-inference-submission-dir\index.md - - get-mlperf-inference-sut-configs: MLPerf-benchmark-support\get-mlperf-inference-sut-configs\index.md - - get-mlperf-inference-sut-description: MLPerf-benchmark-support\get-mlperf-inference-sut-description\index.md - - get-mlperf-logging: MLPerf-benchmark-support\get-mlperf-logging\index.md - - get-mlperf-power-dev: MLPerf-benchmark-support\get-mlperf-power-dev\index.md - - get-mlperf-tiny-eembc-energy-runner-src: MLPerf-benchmark-support\get-mlperf-tiny-eembc-energy-runner-src\index.md - - get-mlperf-tiny-src: MLPerf-benchmark-support\get-mlperf-tiny-src\index.md - - get-mlperf-training-nvidia-code: 
MLPerf-benchmark-support\get-mlperf-training-nvidia-code\index.md - - get-mlperf-training-src: MLPerf-benchmark-support\get-mlperf-training-src\index.md - - get-nvidia-mitten: MLPerf-benchmark-support\get-nvidia-mitten\index.md - - get-spec-ptd: MLPerf-benchmark-support\get-spec-ptd\index.md - - import-mlperf-inference-to-experiment: MLPerf-benchmark-support\import-mlperf-inference-to-experiment\index.md - - import-mlperf-tiny-to-experiment: MLPerf-benchmark-support\import-mlperf-tiny-to-experiment\index.md - - import-mlperf-training-to-experiment: MLPerf-benchmark-support\import-mlperf-training-to-experiment\index.md - - install-mlperf-logging-from-src: MLPerf-benchmark-support\install-mlperf-logging-from-src\index.md - - prepare-training-data-bert: MLPerf-benchmark-support\prepare-training-data-bert\index.md - - prepare-training-data-resnet: MLPerf-benchmark-support\prepare-training-data-resnet\index.md - - preprocess-mlperf-inference-submission: MLPerf-benchmark-support\preprocess-mlperf-inference-submission\index.md - - process-mlperf-accuracy: MLPerf-benchmark-support\process-mlperf-accuracy\index.md - - push-mlperf-inference-results-to-github: MLPerf-benchmark-support\push-mlperf-inference-results-to-github\index.md - - run-all-mlperf-models: MLPerf-benchmark-support\run-all-mlperf-models\index.md - - run-mlperf-inference-mobilenet-models: MLPerf-benchmark-support\run-mlperf-inference-mobilenet-models\index.md - - run-mlperf-inference-submission-checker: MLPerf-benchmark-support\run-mlperf-inference-submission-checker\index.md - - run-mlperf-power-client: MLPerf-benchmark-support\run-mlperf-power-client\index.md - - run-mlperf-power-server: MLPerf-benchmark-support\run-mlperf-power-server\index.md - - run-mlperf-training-submission-checker: MLPerf-benchmark-support\run-mlperf-training-submission-checker\index.md - - truncate-mlperf-inference-accuracy-log: MLPerf-benchmark-support\truncate-mlperf-inference-accuracy-log\index.md + - add-custom-nvidia-system: 
MLPerf-benchmark-support/add-custom-nvidia-system/index.md + - benchmark-any-mlperf-inference-implementation: MLPerf-benchmark-support/benchmark-any-mlperf-inference-implementation/index.md + - build-mlperf-inference-server-nvidia: MLPerf-benchmark-support/build-mlperf-inference-server-nvidia/index.md + - generate-mlperf-inference-submission: MLPerf-benchmark-support/generate-mlperf-inference-submission/index.md + - generate-mlperf-inference-user-conf: MLPerf-benchmark-support/generate-mlperf-inference-user-conf/index.md + - generate-mlperf-tiny-report: MLPerf-benchmark-support/generate-mlperf-tiny-report/index.md + - generate-mlperf-tiny-submission: MLPerf-benchmark-support/generate-mlperf-tiny-submission/index.md + - generate-nvidia-engine: MLPerf-benchmark-support/generate-nvidia-engine/index.md + - get-mlperf-inference-intel-scratch-space: MLPerf-benchmark-support/get-mlperf-inference-intel-scratch-space/index.md + - get-mlperf-inference-loadgen: MLPerf-benchmark-support/get-mlperf-inference-loadgen/index.md + - get-mlperf-inference-nvidia-common-code: MLPerf-benchmark-support/get-mlperf-inference-nvidia-common-code/index.md + - get-mlperf-inference-nvidia-scratch-space: MLPerf-benchmark-support/get-mlperf-inference-nvidia-scratch-space/index.md + - get-mlperf-inference-results: MLPerf-benchmark-support/get-mlperf-inference-results/index.md + - get-mlperf-inference-results-dir: MLPerf-benchmark-support/get-mlperf-inference-results-dir/index.md + - get-mlperf-inference-src: MLPerf-benchmark-support/get-mlperf-inference-src/index.md + - get-mlperf-inference-submission-dir: MLPerf-benchmark-support/get-mlperf-inference-submission-dir/index.md + - get-mlperf-inference-sut-configs: MLPerf-benchmark-support/get-mlperf-inference-sut-configs/index.md + - get-mlperf-inference-sut-description: MLPerf-benchmark-support/get-mlperf-inference-sut-description/index.md + - get-mlperf-logging: MLPerf-benchmark-support/get-mlperf-logging/index.md + - get-mlperf-power-dev: 
MLPerf-benchmark-support/get-mlperf-power-dev/index.md + - get-mlperf-tiny-eembc-energy-runner-src: MLPerf-benchmark-support/get-mlperf-tiny-eembc-energy-runner-src/index.md + - get-mlperf-tiny-src: MLPerf-benchmark-support/get-mlperf-tiny-src/index.md + - get-mlperf-training-nvidia-code: MLPerf-benchmark-support/get-mlperf-training-nvidia-code/index.md + - get-mlperf-training-src: MLPerf-benchmark-support/get-mlperf-training-src/index.md + - get-nvidia-mitten: MLPerf-benchmark-support/get-nvidia-mitten/index.md + - get-spec-ptd: MLPerf-benchmark-support/get-spec-ptd/index.md + - import-mlperf-inference-to-experiment: MLPerf-benchmark-support/import-mlperf-inference-to-experiment/index.md + - import-mlperf-tiny-to-experiment: MLPerf-benchmark-support/import-mlperf-tiny-to-experiment/index.md + - import-mlperf-training-to-experiment: MLPerf-benchmark-support/import-mlperf-training-to-experiment/index.md + - install-mlperf-logging-from-src: MLPerf-benchmark-support/install-mlperf-logging-from-src/index.md + - prepare-training-data-bert: MLPerf-benchmark-support/prepare-training-data-bert/index.md + - prepare-training-data-resnet: MLPerf-benchmark-support/prepare-training-data-resnet/index.md + - preprocess-mlperf-inference-submission: MLPerf-benchmark-support/preprocess-mlperf-inference-submission/index.md + - process-mlperf-accuracy: MLPerf-benchmark-support/process-mlperf-accuracy/index.md + - push-mlperf-inference-results-to-github: MLPerf-benchmark-support/push-mlperf-inference-results-to-github/index.md + - run-all-mlperf-models: MLPerf-benchmark-support/run-all-mlperf-models/index.md + - run-mlperf-inference-mobilenet-models: MLPerf-benchmark-support/run-mlperf-inference-mobilenet-models/index.md + - run-mlperf-inference-submission-checker: MLPerf-benchmark-support/run-mlperf-inference-submission-checker/index.md + - run-mlperf-power-client: MLPerf-benchmark-support/run-mlperf-power-client/index.md + - run-mlperf-power-server: 
MLPerf-benchmark-support/run-mlperf-power-server/index.md + - run-mlperf-training-submission-checker: MLPerf-benchmark-support/run-mlperf-training-submission-checker/index.md + - truncate-mlperf-inference-accuracy-log: MLPerf-benchmark-support/truncate-mlperf-inference-accuracy-log/index.md - Modular AI-ML application pipeline: - - app-image-classification-onnx-py: Modular-AI-ML-application-pipeline\app-image-classification-onnx-py\index.md - - app-image-classification-tf-onnx-cpp: Modular-AI-ML-application-pipeline\app-image-classification-tf-onnx-cpp\index.md - - app-image-classification-torch-py: Modular-AI-ML-application-pipeline\app-image-classification-torch-py\index.md - - app-image-classification-tvm-onnx-py: Modular-AI-ML-application-pipeline\app-image-classification-tvm-onnx-py\index.md - - app-stable-diffusion-onnx-py: Modular-AI-ML-application-pipeline\app-stable-diffusion-onnx-py\index.md + - app-image-classification-onnx-py: Modular-AI-ML-application-pipeline/app-image-classification-onnx-py/index.md + - app-image-classification-tf-onnx-cpp: Modular-AI-ML-application-pipeline/app-image-classification-tf-onnx-cpp/index.md + - app-image-classification-torch-py: Modular-AI-ML-application-pipeline/app-image-classification-torch-py/index.md + - app-image-classification-tvm-onnx-py: Modular-AI-ML-application-pipeline/app-image-classification-tvm-onnx-py/index.md + - app-stable-diffusion-onnx-py: Modular-AI-ML-application-pipeline/app-stable-diffusion-onnx-py/index.md - Modular application pipeline: - - app-image-corner-detection: Modular-application-pipeline\app-image-corner-detection\index.md + - app-image-corner-detection: Modular-application-pipeline/app-image-corner-detection/index.md - Modular MLPerf inference benchmark pipeline: - - app-loadgen-generic-python: Modular-MLPerf-inference-benchmark-pipeline\app-loadgen-generic-python\index.md - - app-mlperf-inference: Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference\index.md - - 
app-mlperf-inference-ctuning-cpp-tflite: Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference-ctuning-cpp-tflite\index.md - - app-mlperf-inference-mlcommons-cpp: Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference-mlcommons-cpp\index.md - - app-mlperf-inference-mlcommons-python: Modular-MLPerf-inference-benchmark-pipeline\app-mlperf-inference-mlcommons-python\index.md - - benchmark-program-mlperf: Modular-MLPerf-inference-benchmark-pipeline\benchmark-program-mlperf\index.md - - run-mlperf-inference-app: Modular-MLPerf-inference-benchmark-pipeline\run-mlperf-inference-app\index.md + - app-loadgen-generic-python: Modular-MLPerf-inference-benchmark-pipeline/app-loadgen-generic-python/index.md + - app-mlperf-inference: Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference/index.md + - app-mlperf-inference-ctuning-cpp-tflite: Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-ctuning-cpp-tflite/index.md + - app-mlperf-inference-mlcommons-cpp: Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-cpp/index.md + - app-mlperf-inference-mlcommons-python: Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-python/index.md + - benchmark-program-mlperf: Modular-MLPerf-inference-benchmark-pipeline/benchmark-program-mlperf/index.md + - run-mlperf-inference-app: Modular-MLPerf-inference-benchmark-pipeline/run-mlperf-inference-app/index.md - Modular MLPerf benchmarks: - - app-mlperf-inference-dummy: Modular-MLPerf-benchmarks\app-mlperf-inference-dummy\index.md - - app-mlperf-inference-intel: Modular-MLPerf-benchmarks\app-mlperf-inference-intel\index.md - - app-mlperf-inference-qualcomm: Modular-MLPerf-benchmarks\app-mlperf-inference-qualcomm\index.md + - app-mlperf-inference-dummy: Modular-MLPerf-benchmarks/app-mlperf-inference-dummy/index.md + - app-mlperf-inference-intel: Modular-MLPerf-benchmarks/app-mlperf-inference-intel/index.md + - app-mlperf-inference-qualcomm: 
Modular-MLPerf-benchmarks/app-mlperf-inference-qualcomm/index.md - Reproduce MLPerf benchmarks: - - app-mlperf-inference-nvidia: Reproduce-MLPerf-benchmarks\app-mlperf-inference-nvidia\index.md - - reproduce-mlperf-octoml-tinyml-results: Reproduce-MLPerf-benchmarks\reproduce-mlperf-octoml-tinyml-results\index.md - - reproduce-mlperf-training-nvidia: Reproduce-MLPerf-benchmarks\reproduce-mlperf-training-nvidia\index.md - - wrapper-reproduce-octoml-tinyml-submission: Reproduce-MLPerf-benchmarks\wrapper-reproduce-octoml-tinyml-submission\index.md + - app-mlperf-inference-nvidia: Reproduce-MLPerf-benchmarks/app-mlperf-inference-nvidia/index.md + - reproduce-mlperf-octoml-tinyml-results: Reproduce-MLPerf-benchmarks/reproduce-mlperf-octoml-tinyml-results/index.md + - reproduce-mlperf-training-nvidia: Reproduce-MLPerf-benchmarks/reproduce-mlperf-training-nvidia/index.md + - wrapper-reproduce-octoml-tinyml-submission: Reproduce-MLPerf-benchmarks/wrapper-reproduce-octoml-tinyml-submission/index.md - Modular MLPerf training benchmark pipeline: - - app-mlperf-training-nvidia: Modular-MLPerf-training-benchmark-pipeline\app-mlperf-training-nvidia\index.md - - app-mlperf-training-reference: Modular-MLPerf-training-benchmark-pipeline\app-mlperf-training-reference\index.md + - app-mlperf-training-nvidia: Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-nvidia/index.md + - app-mlperf-training-reference: Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-reference/index.md - DevOps automation: - - benchmark-program: DevOps-automation\benchmark-program\index.md - - compile-program: DevOps-automation\compile-program\index.md - - convert-csv-to-md: DevOps-automation\convert-csv-to-md\index.md - - copy-to-clipboard: DevOps-automation\copy-to-clipboard\index.md - - create-conda-env: DevOps-automation\create-conda-env\index.md - - create-patch: DevOps-automation\create-patch\index.md - - detect-sudo: DevOps-automation\detect-sudo\index.md - - 
download-and-extract: DevOps-automation\download-and-extract\index.md - - download-file: DevOps-automation\download-file\index.md - - download-torrent: DevOps-automation\download-torrent\index.md - - extract-file: DevOps-automation\extract-file\index.md - - fail: DevOps-automation\fail\index.md - - get-conda: DevOps-automation\get-conda\index.md - - get-git-repo: DevOps-automation\get-git-repo\index.md - - get-github-cli: DevOps-automation\get-github-cli\index.md - - pull-git-repo: DevOps-automation\pull-git-repo\index.md - - push-csv-to-spreadsheet: DevOps-automation\push-csv-to-spreadsheet\index.md - - set-device-settings-qaic: DevOps-automation\set-device-settings-qaic\index.md - - set-echo-off-win: DevOps-automation\set-echo-off-win\index.md - - set-performance-mode: DevOps-automation\set-performance-mode\index.md - - set-sqlite-dir: DevOps-automation\set-sqlite-dir\index.md - - tar-my-folder: DevOps-automation\tar-my-folder\index.md + - benchmark-program: DevOps-automation/benchmark-program/index.md + - compile-program: DevOps-automation/compile-program/index.md + - convert-csv-to-md: DevOps-automation/convert-csv-to-md/index.md + - copy-to-clipboard: DevOps-automation/copy-to-clipboard/index.md + - create-conda-env: DevOps-automation/create-conda-env/index.md + - create-patch: DevOps-automation/create-patch/index.md + - detect-sudo: DevOps-automation/detect-sudo/index.md + - download-and-extract: DevOps-automation/download-and-extract/index.md + - download-file: DevOps-automation/download-file/index.md + - download-torrent: DevOps-automation/download-torrent/index.md + - extract-file: DevOps-automation/extract-file/index.md + - fail: DevOps-automation/fail/index.md + - get-conda: DevOps-automation/get-conda/index.md + - get-git-repo: DevOps-automation/get-git-repo/index.md + - get-github-cli: DevOps-automation/get-github-cli/index.md + - pull-git-repo: DevOps-automation/pull-git-repo/index.md + - push-csv-to-spreadsheet: 
DevOps-automation/push-csv-to-spreadsheet/index.md + - set-device-settings-qaic: DevOps-automation/set-device-settings-qaic/index.md + - set-echo-off-win: DevOps-automation/set-echo-off-win/index.md + - set-performance-mode: DevOps-automation/set-performance-mode/index.md + - set-sqlite-dir: DevOps-automation/set-sqlite-dir/index.md + - tar-my-folder: DevOps-automation/tar-my-folder/index.md - Docker automation: - - build-docker-image: Docker-automation\build-docker-image\index.md - - build-dockerfile: Docker-automation\build-dockerfile\index.md - - prune-docker: Docker-automation\prune-docker\index.md - - run-docker-container: Docker-automation\run-docker-container\index.md + - build-docker-image: Docker-automation/build-docker-image/index.md + - build-dockerfile: Docker-automation/build-dockerfile/index.md + - prune-docker: Docker-automation/prune-docker/index.md + - run-docker-container: Docker-automation/run-docker-container/index.md - AI-ML optimization: - - calibrate-model-for.qaic: AI-ML-optimization\calibrate-model-for.qaic\index.md - - compile-model-for.qaic: AI-ML-optimization\compile-model-for.qaic\index.md - - prune-bert-models: AI-ML-optimization\prune-bert-models\index.md + - calibrate-model-for.qaic: AI-ML-optimization/calibrate-model-for.qaic/index.md + - compile-model-for.qaic: AI-ML-optimization/compile-model-for.qaic/index.md + - prune-bert-models: AI-ML-optimization/prune-bert-models/index.md - AI-ML models: - - convert-ml-model-huggingface-to-onnx: AI-ML-models\convert-ml-model-huggingface-to-onnx\index.md - - get-bert-squad-vocab: AI-ML-models\get-bert-squad-vocab\index.md - - get-dlrm: AI-ML-models\get-dlrm\index.md - - get-ml-model-3d-unet-kits19: AI-ML-models\get-ml-model-3d-unet-kits19\index.md - - get-ml-model-bert-base-squad: AI-ML-models\get-ml-model-bert-base-squad\index.md - - get-ml-model-bert-large-squad: AI-ML-models\get-ml-model-bert-large-squad\index.md - - get-ml-model-dlrm-terabyte: 
AI-ML-models\get-ml-model-dlrm-terabyte\index.md - - get-ml-model-efficientnet-lite: AI-ML-models\get-ml-model-efficientnet-lite\index.md - - get-ml-model-gptj: AI-ML-models\get-ml-model-gptj\index.md - - get-ml-model-huggingface-zoo: AI-ML-models\get-ml-model-huggingface-zoo\index.md - - get-ml-model-llama2: AI-ML-models\get-ml-model-llama2\index.md - - get-ml-model-mobilenet: AI-ML-models\get-ml-model-mobilenet\index.md - - get-ml-model-neuralmagic-zoo: AI-ML-models\get-ml-model-neuralmagic-zoo\index.md - - get-ml-model-resnet50: AI-ML-models\get-ml-model-resnet50\index.md - - get-ml-model-retinanet: AI-ML-models\get-ml-model-retinanet\index.md - - get-ml-model-retinanet-nvidia: AI-ML-models\get-ml-model-retinanet-nvidia\index.md - - get-ml-model-rnnt: AI-ML-models\get-ml-model-rnnt\index.md - - get-ml-model-stable-diffusion: AI-ML-models\get-ml-model-stable-diffusion\index.md - - get-ml-model-tiny-resnet: AI-ML-models\get-ml-model-tiny-resnet\index.md - - get-ml-model-using-imagenet-from-model-zoo: AI-ML-models\get-ml-model-using-imagenet-from-model-zoo\index.md - - get-tvm-model: AI-ML-models\get-tvm-model\index.md + - convert-ml-model-huggingface-to-onnx: AI-ML-models/convert-ml-model-huggingface-to-onnx/index.md + - get-bert-squad-vocab: AI-ML-models/get-bert-squad-vocab/index.md + - get-dlrm: AI-ML-models/get-dlrm/index.md + - get-ml-model-3d-unet-kits19: AI-ML-models/get-ml-model-3d-unet-kits19/index.md + - get-ml-model-bert-base-squad: AI-ML-models/get-ml-model-bert-base-squad/index.md + - get-ml-model-bert-large-squad: AI-ML-models/get-ml-model-bert-large-squad/index.md + - get-ml-model-dlrm-terabyte: AI-ML-models/get-ml-model-dlrm-terabyte/index.md + - get-ml-model-efficientnet-lite: AI-ML-models/get-ml-model-efficientnet-lite/index.md + - get-ml-model-gptj: AI-ML-models/get-ml-model-gptj/index.md + - get-ml-model-huggingface-zoo: AI-ML-models/get-ml-model-huggingface-zoo/index.md + - get-ml-model-llama2: AI-ML-models/get-ml-model-llama2/index.md + - 
get-ml-model-mobilenet: AI-ML-models/get-ml-model-mobilenet/index.md + - get-ml-model-neuralmagic-zoo: AI-ML-models/get-ml-model-neuralmagic-zoo/index.md + - get-ml-model-resnet50: AI-ML-models/get-ml-model-resnet50/index.md + - get-ml-model-retinanet: AI-ML-models/get-ml-model-retinanet/index.md + - get-ml-model-retinanet-nvidia: AI-ML-models/get-ml-model-retinanet-nvidia/index.md + - get-ml-model-rnnt: AI-ML-models/get-ml-model-rnnt/index.md + - get-ml-model-stable-diffusion: AI-ML-models/get-ml-model-stable-diffusion/index.md + - get-ml-model-tiny-resnet: AI-ML-models/get-ml-model-tiny-resnet/index.md + - get-ml-model-using-imagenet-from-model-zoo: AI-ML-models/get-ml-model-using-imagenet-from-model-zoo/index.md + - get-tvm-model: AI-ML-models/get-tvm-model/index.md - CM automation: - - create-custom-cache-entry: CM-automation\create-custom-cache-entry\index.md + - create-custom-cache-entry: CM-automation/create-custom-cache-entry/index.md - TinyML automation: - - create-fpgaconvnet-app-tinyml: TinyML-automation\create-fpgaconvnet-app-tinyml\index.md - - create-fpgaconvnet-config-tinyml: TinyML-automation\create-fpgaconvnet-config-tinyml\index.md - - flash-tinyml-binary: TinyML-automation\flash-tinyml-binary\index.md - - get-microtvm: TinyML-automation\get-microtvm\index.md - - get-zephyr: TinyML-automation\get-zephyr\index.md - - get-zephyr-sdk: TinyML-automation\get-zephyr-sdk\index.md + - create-fpgaconvnet-app-tinyml: TinyML-automation/create-fpgaconvnet-app-tinyml/index.md + - create-fpgaconvnet-config-tinyml: TinyML-automation/create-fpgaconvnet-config-tinyml/index.md + - flash-tinyml-binary: TinyML-automation/flash-tinyml-binary/index.md + - get-microtvm: TinyML-automation/get-microtvm/index.md + - get-zephyr: TinyML-automation/get-zephyr/index.md + - get-zephyr-sdk: TinyML-automation/get-zephyr-sdk/index.md - Cloud automation: - - destroy-terraform: Cloud-automation\destroy-terraform\index.md - - get-aws-cli: Cloud-automation\get-aws-cli\index.md - - 
get-terraform: Cloud-automation\get-terraform\index.md - - install-aws-cli: Cloud-automation\install-aws-cli\index.md - - install-terraform-from-src: Cloud-automation\install-terraform-from-src\index.md - - run-terraform: Cloud-automation\run-terraform\index.md + - destroy-terraform: Cloud-automation/destroy-terraform/index.md + - get-aws-cli: Cloud-automation/get-aws-cli/index.md + - get-terraform: Cloud-automation/get-terraform/index.md + - install-aws-cli: Cloud-automation/install-aws-cli/index.md + - install-terraform-from-src: Cloud-automation/install-terraform-from-src/index.md + - run-terraform: Cloud-automation/run-terraform/index.md - Platform information: - - detect-cpu: Platform-information\detect-cpu\index.md - - detect-os: Platform-information\detect-os\index.md + - detect-cpu: Platform-information/detect-cpu/index.md + - detect-os: Platform-information/detect-os/index.md - Detection or installation of tools and artifacts: - - get-android-sdk: Detection-or-installation-of-tools-and-artifacts\get-android-sdk\index.md - - get-aria2: Detection-or-installation-of-tools-and-artifacts\get-aria2\index.md - - get-bazel: Detection-or-installation-of-tools-and-artifacts\get-bazel\index.md - - get-blis: Detection-or-installation-of-tools-and-artifacts\get-blis\index.md - - get-brew: Detection-or-installation-of-tools-and-artifacts\get-brew\index.md - - get-cmake: Detection-or-installation-of-tools-and-artifacts\get-cmake\index.md - - get-cmsis_5: Detection-or-installation-of-tools-and-artifacts\get-cmsis_5\index.md - - get-docker: Detection-or-installation-of-tools-and-artifacts\get-docker\index.md - - get-generic-sys-util: Detection-or-installation-of-tools-and-artifacts\get-generic-sys-util\index.md - - get-google-test: Detection-or-installation-of-tools-and-artifacts\get-google-test\index.md - - get-java: Detection-or-installation-of-tools-and-artifacts\get-java\index.md - - get-javac: Detection-or-installation-of-tools-and-artifacts\get-javac\index.md - - 
get-lib-armnn: Detection-or-installation-of-tools-and-artifacts\get-lib-armnn\index.md - - get-lib-dnnl: Detection-or-installation-of-tools-and-artifacts\get-lib-dnnl\index.md - - get-lib-protobuf: Detection-or-installation-of-tools-and-artifacts\get-lib-protobuf\index.md - - get-lib-qaic-api: Detection-or-installation-of-tools-and-artifacts\get-lib-qaic-api\index.md - - get-nvidia-docker: Detection-or-installation-of-tools-and-artifacts\get-nvidia-docker\index.md - - get-openssl: Detection-or-installation-of-tools-and-artifacts\get-openssl\index.md - - get-rclone: Detection-or-installation-of-tools-and-artifacts\get-rclone\index.md - - get-sys-utils-cm: Detection-or-installation-of-tools-and-artifacts\get-sys-utils-cm\index.md - - get-sys-utils-min: Detection-or-installation-of-tools-and-artifacts\get-sys-utils-min\index.md - - get-xilinx-sdk: Detection-or-installation-of-tools-and-artifacts\get-xilinx-sdk\index.md - - get-zendnn: Detection-or-installation-of-tools-and-artifacts\get-zendnn\index.md - - install-bazel: Detection-or-installation-of-tools-and-artifacts\install-bazel\index.md - - install-cmake-prebuilt: Detection-or-installation-of-tools-and-artifacts\install-cmake-prebuilt\index.md - - install-gflags: Detection-or-installation-of-tools-and-artifacts\install-gflags\index.md - - install-github-cli: Detection-or-installation-of-tools-and-artifacts\install-github-cli\index.md - - install-numactl-from-src: Detection-or-installation-of-tools-and-artifacts\install-numactl-from-src\index.md - - install-openssl: Detection-or-installation-of-tools-and-artifacts\install-openssl\index.md + - get-android-sdk: Detection-or-installation-of-tools-and-artifacts/get-android-sdk/index.md + - get-aria2: Detection-or-installation-of-tools-and-artifacts/get-aria2/index.md + - get-bazel: Detection-or-installation-of-tools-and-artifacts/get-bazel/index.md + - get-blis: Detection-or-installation-of-tools-and-artifacts/get-blis/index.md + - get-brew: 
Detection-or-installation-of-tools-and-artifacts/get-brew/index.md + - get-cmake: Detection-or-installation-of-tools-and-artifacts/get-cmake/index.md + - get-cmsis_5: Detection-or-installation-of-tools-and-artifacts/get-cmsis_5/index.md + - get-docker: Detection-or-installation-of-tools-and-artifacts/get-docker/index.md + - get-generic-sys-util: Detection-or-installation-of-tools-and-artifacts/get-generic-sys-util/index.md + - get-google-test: Detection-or-installation-of-tools-and-artifacts/get-google-test/index.md + - get-java: Detection-or-installation-of-tools-and-artifacts/get-java/index.md + - get-javac: Detection-or-installation-of-tools-and-artifacts/get-javac/index.md + - get-lib-armnn: Detection-or-installation-of-tools-and-artifacts/get-lib-armnn/index.md + - get-lib-dnnl: Detection-or-installation-of-tools-and-artifacts/get-lib-dnnl/index.md + - get-lib-protobuf: Detection-or-installation-of-tools-and-artifacts/get-lib-protobuf/index.md + - get-lib-qaic-api: Detection-or-installation-of-tools-and-artifacts/get-lib-qaic-api/index.md + - get-nvidia-docker: Detection-or-installation-of-tools-and-artifacts/get-nvidia-docker/index.md + - get-openssl: Detection-or-installation-of-tools-and-artifacts/get-openssl/index.md + - get-rclone: Detection-or-installation-of-tools-and-artifacts/get-rclone/index.md + - get-sys-utils-cm: Detection-or-installation-of-tools-and-artifacts/get-sys-utils-cm/index.md + - get-sys-utils-min: Detection-or-installation-of-tools-and-artifacts/get-sys-utils-min/index.md + - get-xilinx-sdk: Detection-or-installation-of-tools-and-artifacts/get-xilinx-sdk/index.md + - get-zendnn: Detection-or-installation-of-tools-and-artifacts/get-zendnn/index.md + - install-bazel: Detection-or-installation-of-tools-and-artifacts/install-bazel/index.md + - install-cmake-prebuilt: Detection-or-installation-of-tools-and-artifacts/install-cmake-prebuilt/index.md + - install-gflags: Detection-or-installation-of-tools-and-artifacts/install-gflags/index.md + 
- install-github-cli: Detection-or-installation-of-tools-and-artifacts/install-github-cli/index.md + - install-numactl-from-src: Detection-or-installation-of-tools-and-artifacts/install-numactl-from-src/index.md + - install-openssl: Detection-or-installation-of-tools-and-artifacts/install-openssl/index.md - Compiler automation: - - get-aocl: Compiler-automation\get-aocl\index.md - - get-cl: Compiler-automation\get-cl\index.md - - get-compiler-flags: Compiler-automation\get-compiler-flags\index.md - - get-compiler-rust: Compiler-automation\get-compiler-rust\index.md - - get-gcc: Compiler-automation\get-gcc\index.md - - get-go: Compiler-automation\get-go\index.md - - get-llvm: Compiler-automation\get-llvm\index.md - - install-gcc-src: Compiler-automation\install-gcc-src\index.md - - install-ipex-from-src: Compiler-automation\install-ipex-from-src\index.md - - install-llvm-prebuilt: Compiler-automation\install-llvm-prebuilt\index.md - - install-llvm-src: Compiler-automation\install-llvm-src\index.md - - install-onednn-from-src: Compiler-automation\install-onednn-from-src\index.md - - install-onnxruntime-from-src: Compiler-automation\install-onnxruntime-from-src\index.md - - install-pytorch-from-src: Compiler-automation\install-pytorch-from-src\index.md - - install-pytorch-kineto-from-src: Compiler-automation\install-pytorch-kineto-from-src\index.md - - install-torchvision-from-src: Compiler-automation\install-torchvision-from-src\index.md - - install-tpp-pytorch-extension: Compiler-automation\install-tpp-pytorch-extension\index.md - - install-transformers-from-src: Compiler-automation\install-transformers-from-src\index.md + - get-aocl: Compiler-automation/get-aocl/index.md + - get-cl: Compiler-automation/get-cl/index.md + - get-compiler-flags: Compiler-automation/get-compiler-flags/index.md + - get-compiler-rust: Compiler-automation/get-compiler-rust/index.md + - get-gcc: Compiler-automation/get-gcc/index.md + - get-go: Compiler-automation/get-go/index.md + - 
get-llvm: Compiler-automation/get-llvm/index.md + - install-gcc-src: Compiler-automation/install-gcc-src/index.md + - install-ipex-from-src: Compiler-automation/install-ipex-from-src/index.md + - install-llvm-prebuilt: Compiler-automation/install-llvm-prebuilt/index.md + - install-llvm-src: Compiler-automation/install-llvm-src/index.md + - install-onednn-from-src: Compiler-automation/install-onednn-from-src/index.md + - install-onnxruntime-from-src: Compiler-automation/install-onnxruntime-from-src/index.md + - install-pytorch-from-src: Compiler-automation/install-pytorch-from-src/index.md + - install-pytorch-kineto-from-src: Compiler-automation/install-pytorch-kineto-from-src/index.md + - install-torchvision-from-src: Compiler-automation/install-torchvision-from-src/index.md + - install-tpp-pytorch-extension: Compiler-automation/install-tpp-pytorch-extension/index.md + - install-transformers-from-src: Compiler-automation/install-transformers-from-src/index.md - CM Interface: - - get-cache-dir: CM-Interface\get-cache-dir\index.md + - get-cache-dir: CM-Interface/get-cache-dir/index.md - Legacy CK support: - - get-ck: Legacy-CK-support\get-ck\index.md - - get-ck-repo-mlops: Legacy-CK-support\get-ck-repo-mlops\index.md + - get-ck: Legacy-CK-support/get-ck/index.md + - get-ck-repo-mlops: Legacy-CK-support/get-ck-repo-mlops/index.md - AI-ML datasets: - - get-croissant: AI-ML-datasets\get-croissant\index.md - - get-dataset-cifar10: AI-ML-datasets\get-dataset-cifar10\index.md - - get-dataset-cnndm: AI-ML-datasets\get-dataset-cnndm\index.md - - get-dataset-coco: AI-ML-datasets\get-dataset-coco\index.md - - get-dataset-coco2014: AI-ML-datasets\get-dataset-coco2014\index.md - - get-dataset-criteo: AI-ML-datasets\get-dataset-criteo\index.md - - get-dataset-imagenet-aux: AI-ML-datasets\get-dataset-imagenet-aux\index.md - - get-dataset-imagenet-calibration: AI-ML-datasets\get-dataset-imagenet-calibration\index.md - - get-dataset-imagenet-helper: 
AI-ML-datasets\get-dataset-imagenet-helper\index.md - - get-dataset-imagenet-train: AI-ML-datasets\get-dataset-imagenet-train\index.md - - get-dataset-imagenet-val: AI-ML-datasets\get-dataset-imagenet-val\index.md - - get-dataset-kits19: AI-ML-datasets\get-dataset-kits19\index.md - - get-dataset-librispeech: AI-ML-datasets\get-dataset-librispeech\index.md - - get-dataset-openimages: AI-ML-datasets\get-dataset-openimages\index.md - - get-dataset-openimages-annotations: AI-ML-datasets\get-dataset-openimages-annotations\index.md - - get-dataset-openimages-calibration: AI-ML-datasets\get-dataset-openimages-calibration\index.md - - get-dataset-openorca: AI-ML-datasets\get-dataset-openorca\index.md - - get-dataset-squad: AI-ML-datasets\get-dataset-squad\index.md - - get-dataset-squad-vocab: AI-ML-datasets\get-dataset-squad-vocab\index.md - - get-preprocessed-dataset-criteo: AI-ML-datasets\get-preprocessed-dataset-criteo\index.md - - get-preprocessed-dataset-generic: AI-ML-datasets\get-preprocessed-dataset-generic\index.md - - get-preprocessed-dataset-imagenet: AI-ML-datasets\get-preprocessed-dataset-imagenet\index.md - - get-preprocessed-dataset-kits19: AI-ML-datasets\get-preprocessed-dataset-kits19\index.md - - get-preprocessed-dataset-librispeech: AI-ML-datasets\get-preprocessed-dataset-librispeech\index.md - - get-preprocessed-dataset-openimages: AI-ML-datasets\get-preprocessed-dataset-openimages\index.md - - get-preprocessed-dataset-openorca: AI-ML-datasets\get-preprocessed-dataset-openorca\index.md - - get-preprocessed-dataset-squad: AI-ML-datasets\get-preprocessed-dataset-squad\index.md + - get-croissant: AI-ML-datasets/get-croissant/index.md + - get-dataset-cifar10: AI-ML-datasets/get-dataset-cifar10/index.md + - get-dataset-cnndm: AI-ML-datasets/get-dataset-cnndm/index.md + - get-dataset-coco: AI-ML-datasets/get-dataset-coco/index.md + - get-dataset-coco2014: AI-ML-datasets/get-dataset-coco2014/index.md + - get-dataset-criteo: 
AI-ML-datasets/get-dataset-criteo/index.md + - get-dataset-imagenet-aux: AI-ML-datasets/get-dataset-imagenet-aux/index.md + - get-dataset-imagenet-calibration: AI-ML-datasets/get-dataset-imagenet-calibration/index.md + - get-dataset-imagenet-helper: AI-ML-datasets/get-dataset-imagenet-helper/index.md + - get-dataset-imagenet-train: AI-ML-datasets/get-dataset-imagenet-train/index.md + - get-dataset-imagenet-val: AI-ML-datasets/get-dataset-imagenet-val/index.md + - get-dataset-kits19: AI-ML-datasets/get-dataset-kits19/index.md + - get-dataset-librispeech: AI-ML-datasets/get-dataset-librispeech/index.md + - get-dataset-openimages: AI-ML-datasets/get-dataset-openimages/index.md + - get-dataset-openimages-annotations: AI-ML-datasets/get-dataset-openimages-annotations/index.md + - get-dataset-openimages-calibration: AI-ML-datasets/get-dataset-openimages-calibration/index.md + - get-dataset-openorca: AI-ML-datasets/get-dataset-openorca/index.md + - get-dataset-squad: AI-ML-datasets/get-dataset-squad/index.md + - get-dataset-squad-vocab: AI-ML-datasets/get-dataset-squad-vocab/index.md + - get-preprocessed-dataset-criteo: AI-ML-datasets/get-preprocessed-dataset-criteo/index.md + - get-preprocessed-dataset-generic: AI-ML-datasets/get-preprocessed-dataset-generic/index.md + - get-preprocessed-dataset-imagenet: AI-ML-datasets/get-preprocessed-dataset-imagenet/index.md + - get-preprocessed-dataset-kits19: AI-ML-datasets/get-preprocessed-dataset-kits19/index.md + - get-preprocessed-dataset-librispeech: AI-ML-datasets/get-preprocessed-dataset-librispeech/index.md + - get-preprocessed-dataset-openimages: AI-ML-datasets/get-preprocessed-dataset-openimages/index.md + - get-preprocessed-dataset-openorca: AI-ML-datasets/get-preprocessed-dataset-openorca/index.md + - get-preprocessed-dataset-squad: AI-ML-datasets/get-preprocessed-dataset-squad/index.md - CUDA automation: - - get-cuda: CUDA-automation\get-cuda\index.md - - get-cuda-devices: CUDA-automation\get-cuda-devices\index.md - - 
get-cudnn: CUDA-automation\get-cudnn\index.md - - get-tensorrt: CUDA-automation\get-tensorrt\index.md - - install-cuda-package-manager: CUDA-automation\install-cuda-package-manager\index.md - - install-cuda-prebuilt: CUDA-automation\install-cuda-prebuilt\index.md + - get-cuda: CUDA-automation/get-cuda/index.md + - get-cuda-devices: CUDA-automation/get-cuda-devices/index.md + - get-cudnn: CUDA-automation/get-cudnn/index.md + - get-tensorrt: CUDA-automation/get-tensorrt/index.md + - install-cuda-package-manager: CUDA-automation/install-cuda-package-manager/index.md + - install-cuda-prebuilt: CUDA-automation/install-cuda-prebuilt/index.md - AI-ML frameworks: - - get-google-saxml: AI-ML-frameworks\get-google-saxml\index.md - - get-onnxruntime-prebuilt: AI-ML-frameworks\get-onnxruntime-prebuilt\index.md - - get-qaic-apps-sdk: AI-ML-frameworks\get-qaic-apps-sdk\index.md - - get-qaic-platform-sdk: AI-ML-frameworks\get-qaic-platform-sdk\index.md - - get-qaic-software-kit: AI-ML-frameworks\get-qaic-software-kit\index.md - - get-rocm: AI-ML-frameworks\get-rocm\index.md - - get-tvm: AI-ML-frameworks\get-tvm\index.md - - install-qaic-compute-sdk-from-src: AI-ML-frameworks\install-qaic-compute-sdk-from-src\index.md - - install-rocm: AI-ML-frameworks\install-rocm\index.md - - install-tensorflow-for-c: AI-ML-frameworks\install-tensorflow-for-c\index.md - - install-tensorflow-from-src: AI-ML-frameworks\install-tensorflow-from-src\index.md - - install-tflite-from-src: AI-ML-frameworks\install-tflite-from-src\index.md + - get-google-saxml: AI-ML-frameworks/get-google-saxml/index.md + - get-onnxruntime-prebuilt: AI-ML-frameworks/get-onnxruntime-prebuilt/index.md + - get-qaic-apps-sdk: AI-ML-frameworks/get-qaic-apps-sdk/index.md + - get-qaic-platform-sdk: AI-ML-frameworks/get-qaic-platform-sdk/index.md + - get-qaic-software-kit: AI-ML-frameworks/get-qaic-software-kit/index.md + - get-rocm: AI-ML-frameworks/get-rocm/index.md + - get-tvm: AI-ML-frameworks/get-tvm/index.md + - 
install-qaic-compute-sdk-from-src: AI-ML-frameworks/install-qaic-compute-sdk-from-src/index.md + - install-rocm: AI-ML-frameworks/install-rocm/index.md + - install-tensorflow-for-c: AI-ML-frameworks/install-tensorflow-for-c/index.md + - install-tensorflow-from-src: AI-ML-frameworks/install-tensorflow-from-src/index.md + - install-tflite-from-src: AI-ML-frameworks/install-tflite-from-src/index.md - Reproducibility and artifact evaluation: - - get-ipol-src: Reproducibility-and-artifact-evaluation\get-ipol-src\index.md - - process-ae-users: Reproducibility-and-artifact-evaluation\process-ae-users\index.md - - reproduce-ipol-paper-2022-439: Reproducibility-and-artifact-evaluation\reproduce-ipol-paper-2022-439\index.md - - reproduce-micro-paper-2023-victima: Reproducibility-and-artifact-evaluation\reproduce-micro-paper-2023-victima\index.md + - get-ipol-src: Reproducibility-and-artifact-evaluation/get-ipol-src/index.md + - process-ae-users: Reproducibility-and-artifact-evaluation/process-ae-users/index.md + - reproduce-ipol-paper-2022-439: Reproducibility-and-artifact-evaluation/reproduce-ipol-paper-2022-439/index.md + - reproduce-micro-paper-2023-victima: Reproducibility-and-artifact-evaluation/reproduce-micro-paper-2023-victima/index.md - GUI: - - gui: GUI\gui\index.md + - gui: GUI/gui/index.md - Collective benchmarking: - - launch-benchmark: Collective-benchmarking\launch-benchmark\index.md + - launch-benchmark: Collective-benchmarking/launch-benchmark/index.md - Tests: - - print-any-text: Tests\print-any-text\index.md - - print-croissant-desc: Tests\print-croissant-desc\index.md - - print-hello-world: Tests\print-hello-world\index.md - - print-hello-world-java: Tests\print-hello-world-java\index.md - - print-hello-world-javac: Tests\print-hello-world-javac\index.md - - print-hello-world-py: Tests\print-hello-world-py\index.md - - print-python-version: Tests\print-python-version\index.md - - run-python: Tests\run-python\index.md - - test-cm-core: 
Tests\test-cm-core\index.md - - test-cm-script-pipeline: Tests\test-cm-script-pipeline\index.md - - test-deps-conditions: Tests\test-deps-conditions\index.md - - test-deps-conditions2: Tests\test-deps-conditions2\index.md - - test-download-and-extract-artifacts: Tests\test-download-and-extract-artifacts\index.md - - test-set-sys-user-cm: Tests\test-set-sys-user-cm\index.md - - upgrade-python-pip: Tests\upgrade-python-pip\index.md + - print-any-text: Tests/print-any-text/index.md + - print-croissant-desc: Tests/print-croissant-desc/index.md + - print-hello-world: Tests/print-hello-world/index.md + - print-hello-world-java: Tests/print-hello-world-java/index.md + - print-hello-world-javac: Tests/print-hello-world-javac/index.md + - print-hello-world-py: Tests/print-hello-world-py/index.md + - print-python-version: Tests/print-python-version/index.md + - run-python: Tests/run-python/index.md + - test-cm-core: Tests/test-cm-core/index.md + - test-cm-script-pipeline: Tests/test-cm-script-pipeline/index.md + - test-deps-conditions: Tests/test-deps-conditions/index.md + - test-deps-conditions2: Tests/test-deps-conditions2/index.md + - test-download-and-extract-artifacts: Tests/test-download-and-extract-artifacts/index.md + - test-set-sys-user-cm: Tests/test-set-sys-user-cm/index.md + - upgrade-python-pip: Tests/upgrade-python-pip/index.md - Dashboard automation: - - publish-results-to-dashboard: Dashboard-automation\publish-results-to-dashboard\index.md + - publish-results-to-dashboard: Dashboard-automation/publish-results-to-dashboard/index.md - Remote automation: - - remote-run-commands: Remote-automation\remote-run-commands\index.md + - remote-run-commands: Remote-automation/remote-run-commands/index.md - CM interface prototyping: - - test-debug: CM-interface-prototyping\test-debug\index.md - - test-mlperf-inference-retinanet: CM-interface-prototyping\test-mlperf-inference-retinanet\index.md + - test-debug: CM-interface-prototyping/test-debug/index.md + - 
test-mlperf-inference-retinanet: CM-interface-prototyping/test-mlperf-inference-retinanet/index.md markdown_extensions: - pymdownx.tasklist: From 5e0201a06585e00e0fe233d1042eb551d9b1dd04 Mon Sep 17 00:00:00 2001 From: ANANDHU S <71482562+anandhu-eng@users.noreply.github.com> Date: Sat, 25 May 2024 01:04:03 +0530 Subject: [PATCH 7/7] Reorganised pages - Sep categ pages --- docs/AI-ML-datasets/get-croissant/index.md | 126 -- .../get-dataset-cifar10/index.md | 164 -- .../AI-ML-datasets/get-dataset-cnndm/index.md | 175 --- docs/AI-ML-datasets/get-dataset-coco/index.md | 215 --- .../get-dataset-coco2014/index.md | 204 --- .../get-dataset-criteo/index.md | 154 -- .../get-dataset-imagenet-aux/index.md | 155 -- .../get-dataset-imagenet-calibration/index.md | 146 -- .../get-dataset-imagenet-helper/index.md | 120 -- .../get-dataset-imagenet-train/index.md | 149 -- .../get-dataset-imagenet-val/index.md | 211 --- .../get-dataset-kits19/index.md | 172 --- .../get-dataset-librispeech/index.md | 134 -- .../index.md | 144 -- .../index.md | 178 --- .../get-dataset-openimages/index.md | 250 ---- .../get-dataset-openorca/index.md | 173 --- .../get-dataset-squad-vocab/index.md | 142 -- .../AI-ML-datasets/get-dataset-squad/index.md | 129 -- .../get-preprocessed-dataset-criteo/index.md | 226 --- .../get-preprocessed-dataset-generic/index.md | 117 -- .../index.md | 456 ------ .../get-preprocessed-dataset-kits19/index.md | 232 --- .../index.md | 222 --- .../index.md | 401 ----- .../index.md | 178 --- .../get-preprocessed-dataset-squad/index.md | 238 --- docs/AI-ML-datasets/index.md | 29 + .../get-google-saxml/index.md | 133 -- .../get-onnxruntime-prebuilt/index.md | 157 -- .../get-qaic-apps-sdk/index.md | 124 -- .../get-qaic-platform-sdk/index.md | 128 -- .../get-qaic-software-kit/index.md | 176 --- docs/AI-ML-frameworks/get-rocm/index.md | 126 -- docs/AI-ML-frameworks/get-tvm/index.md | 198 --- docs/AI-ML-frameworks/index.md | 14 + .../index.md | 199 --- 
docs/AI-ML-frameworks/install-rocm/index.md | 129 -- .../install-tensorflow-for-c/index.md | 122 -- .../install-tensorflow-from-src/index.md | 165 -- .../install-tflite-from-src/index.md | 135 -- .../index.md | 143 -- .../get-bert-squad-vocab/index.md | 119 -- docs/AI-ML-models/get-dlrm/index.md | 143 -- .../get-ml-model-3d-unet-kits19/index.md | 200 --- .../get-ml-model-bert-base-squad/index.md | 183 --- .../get-ml-model-bert-large-squad/index.md | 357 ----- .../get-ml-model-dlrm-terabyte/index.md | 262 ---- .../get-ml-model-efficientnet-lite/index.md | 248 --- docs/AI-ML-models/get-ml-model-gptj/index.md | 321 ---- .../get-ml-model-huggingface-zoo/index.md | 192 --- .../AI-ML-models/get-ml-model-llama2/index.md | 222 --- .../get-ml-model-mobilenet/index.md | 470 ------ .../get-ml-model-neuralmagic-zoo/index.md | 335 ----- .../get-ml-model-resnet50/index.md | 356 ----- .../get-ml-model-retinanet-nvidia/index.md | 172 --- .../get-ml-model-retinanet/index.md | 225 --- docs/AI-ML-models/get-ml-model-rnnt/index.md | 192 --- .../get-ml-model-stable-diffusion/index.md | 256 ---- .../get-ml-model-tiny-resnet/index.md | 213 --- .../index.md | 147 -- docs/AI-ML-models/get-tvm-model/index.md | 288 ---- docs/AI-ML-models/index.md | 23 + .../calibrate-model-for.qaic/index.md | 289 ---- .../compile-model-for.qaic/index.md | 438 ------ docs/AI-ML-optimization/index.md | 5 + .../prune-bert-models/index.md | 185 --- docs/CM-Interface/index.md | 3 + docs/CM-automation/index.md | 3 + docs/CM-interface-prototyping/index.md | 4 + .../test-mlperf-inference-retinanet/index.md | 135 -- .../CUDA-automation/get-cuda-devices/index.md | 122 -- docs/CUDA-automation/get-cuda/index.md | 230 --- docs/CUDA-automation/get-cudnn/index.md | 167 --- docs/CUDA-automation/get-tensorrt/index.md | 176 --- docs/CUDA-automation/index.md | 8 + .../install-cuda-package-manager/index.md | 124 -- .../install-cuda-prebuilt/index.md | 180 --- .../destroy-terraform/index.md | 121 -- 
docs/Cloud-automation/get-aws-cli/index.md | 125 -- docs/Cloud-automation/get-terraform/index.md | 126 -- docs/Cloud-automation/index.md | 8 + .../Cloud-automation/install-aws-cli/index.md | 123 -- .../install-terraform-from-src/index.md | 130 -- docs/Cloud-automation/run-terraform/index.md | 481 ------ docs/Collective-benchmarking/index.md | 3 + .../launch-benchmark/index.md | 116 -- docs/Compiler-automation/get-aocl/index.md | 137 -- docs/Compiler-automation/get-cl/index.md | 138 -- .../get-compiler-flags/index.md | 130 -- .../get-compiler-rust/index.md | 120 -- docs/Compiler-automation/get-gcc/index.md | 154 -- docs/Compiler-automation/get-go/index.md | 126 -- docs/Compiler-automation/get-llvm/index.md | 175 --- docs/Compiler-automation/index.md | 20 + .../install-gcc-src/index.md | 127 -- .../install-ipex-from-src/index.md | 198 --- .../install-llvm-prebuilt/index.md | 137 -- .../install-llvm-src/index.md | 292 ---- .../install-onednn-from-src/index.md | 181 --- .../install-onnxruntime-from-src/index.md | 184 --- .../install-pytorch-from-src/index.md | 248 --- .../install-pytorch-kineto-from-src/index.md | 191 --- .../install-torchvision-from-src/index.md | 194 --- .../install-tpp-pytorch-extension/index.md | 198 --- .../install-transformers-from-src/index.md | 196 --- docs/Dashboard-automation/index.md | 3 + .../publish-results-to-dashboard/index.md | 123 -- .../get-android-sdk/index.md | 151 -- .../get-aria2/index.md | 148 -- .../get-bazel/index.md | 127 -- .../get-blis/index.md | 158 -- .../get-brew/index.md | 117 -- .../get-cmake/index.md | 130 -- .../get-cmsis_5/index.md | 149 -- .../get-docker/index.md | 119 -- .../get-generic-sys-util/index.md | 227 --- .../get-google-test/index.md | 137 -- .../get-java/index.md | 165 -- .../get-javac/index.md | 168 --- .../get-lib-armnn/index.md | 132 -- .../get-lib-dnnl/index.md | 132 -- .../get-lib-protobuf/index.md | 154 -- .../get-lib-qaic-api/index.md | 131 -- .../get-nvidia-docker/index.md | 121 -- 
.../get-openssl/index.md | 125 -- .../get-rclone/index.md | 150 -- .../get-sys-utils-cm/index.md | 156 -- .../get-sys-utils-min/index.md | 117 -- .../get-xilinx-sdk/index.md | 138 -- .../get-zendnn/index.md | 127 -- .../index.md | 31 + .../install-bazel/index.md | 134 -- .../install-cmake-prebuilt/index.md | 136 -- .../install-gflags/index.md | 127 -- .../install-github-cli/index.md | 121 -- .../install-numactl-from-src/index.md | 170 --- .../install-openssl/index.md | 134 -- .../benchmark-program/index.md | 151 -- .../compile-program/index.md | 128 -- .../convert-csv-to-md/index.md | 143 -- .../copy-to-clipboard/index.md | 141 -- .../create-conda-env/index.md | 148 -- docs/DevOps-automation/create-patch/index.md | 135 -- docs/DevOps-automation/detect-sudo/index.md | 120 -- .../download-and-extract/index.md | 216 --- docs/DevOps-automation/download-file/index.md | 202 --- .../download-torrent/index.md | 155 -- docs/DevOps-automation/extract-file/index.md | 168 --- docs/DevOps-automation/fail/index.md | 132 -- docs/DevOps-automation/get-conda/index.md | 164 -- docs/DevOps-automation/get-git-repo/index.md | 240 --- .../DevOps-automation/get-github-cli/index.md | 120 -- docs/DevOps-automation/index.md | 24 + docs/DevOps-automation/pull-git-repo/index.md | 134 -- .../push-csv-to-spreadsheet/index.md | 142 -- .../set-device-settings-qaic/index.md | 143 -- .../set-echo-off-win/index.md | 116 -- .../set-performance-mode/index.md | 180 --- .../DevOps-automation/set-sqlite-dir/index.md | 141 -- docs/DevOps-automation/tar-my-folder/index.md | 133 -- .../build-docker-image/index.md | 160 -- .../build-dockerfile/index.md | 186 --- docs/Docker-automation/index.md | 6 + docs/Docker-automation/prune-docker/index.md | 118 -- .../run-docker-container/index.md | 166 -- docs/GUI/gui/index.md | 243 --- docs/GUI/index.md | 3 + .../get-ck-repo-mlops/index.md | 120 -- docs/Legacy-CK-support/get-ck/index.md | 118 -- docs/Legacy-CK-support/index.md | 4 + 
.../add-custom-nvidia-system/index.md | 175 --- .../index.md | 268 ---- .../index.md | 248 --- .../index.md | 191 --- .../index.md | 199 --- .../generate-mlperf-tiny-report/index.md | 145 -- .../generate-mlperf-tiny-submission/index.md | 414 ----- .../generate-nvidia-engine/index.md | 244 --- .../index.md | 161 -- .../get-mlperf-inference-loadgen/index.md | 224 --- .../index.md | 150 -- .../index.md | 162 -- .../get-mlperf-inference-results-dir/index.md | 159 -- .../get-mlperf-inference-results/index.md | 163 -- .../get-mlperf-inference-src/index.md | 266 ---- .../index.md | 159 -- .../get-mlperf-inference-sut-configs/index.md | 161 -- .../index.md | 159 -- .../get-mlperf-logging/index.md | 127 -- .../get-mlperf-power-dev/index.md | 171 --- .../index.md | 129 -- .../get-mlperf-tiny-src/index.md | 143 -- .../get-mlperf-training-nvidia-code/index.md | 158 -- .../get-mlperf-training-src/index.md | 224 --- .../get-nvidia-mitten/index.md | 132 -- .../get-spec-ptd/index.md | 164 -- .../index.md | 152 -- .../import-mlperf-tiny-to-experiment/index.md | 135 -- .../index.md | 141 -- docs/MLPerf-benchmark-support/index.md | 44 + .../install-mlperf-logging-from-src/index.md | 126 -- .../prepare-training-data-bert/index.md | 193 --- .../prepare-training-data-resnet/index.md | 206 --- .../index.md | 144 -- .../process-mlperf-accuracy/index.md | 334 ----- .../index.md | 150 -- .../run-all-mlperf-models/index.md | 237 --- .../index.md | 383 ----- .../index.md | 199 --- .../run-mlperf-power-client/index.md | 154 -- .../run-mlperf-power-server/index.md | 165 -- .../index.md | 181 --- .../index.md | 145 -- .../app-image-classification-onnx-py/index.md | 213 --- .../index.md | 133 -- .../index.md | 170 --- .../index.md | 158 -- .../app-stable-diffusion-onnx-py/index.md | 203 --- .../index.md | 7 + .../app-mlperf-inference-dummy/index.md | 360 ----- .../app-mlperf-inference-intel/index.md | 621 -------- .../app-mlperf-inference-qualcomm/index.md | 775 ---------- 
docs/Modular-MLPerf-benchmarks/index.md | 5 + .../app-loadgen-generic-python/index.md | 331 ---- .../index.md | 382 ----- .../index.md | 336 ----- .../index.md | 944 ------------ .../app-mlperf-inference/index.md | 805 ---------- .../benchmark-program-mlperf/index.md | 152 -- .../index.md | 9 + .../run-mlperf-inference-app/index.md | 405 ----- .../app-mlperf-training-nvidia/index.md | 242 --- .../app-mlperf-training-reference/index.md | 240 --- .../index.md | 4 + .../app-image-corner-detection/index.md | 129 -- docs/Modular-application-pipeline/index.md | 3 + docs/Platform-information/detect-cpu/index.md | 128 -- docs/Platform-information/detect-os/index.md | 138 -- docs/Platform-information/index.md | 4 + .../activate-python-venv/index.md | 121 -- .../get-generic-python-lib/index.md | 681 --------- docs/Python-automation/get-python3/index.md | 169 --- docs/Python-automation/index.md | 8 + .../install-generic-conda-package/index.md | 158 -- .../install-python-src/index.md | 182 --- .../install-python-venv/index.md | 152 -- docs/Remote-automation/index.md | 3 + .../remote-run-commands/index.md | 145 -- .../app-mlperf-inference-nvidia/index.md | 1333 ----------------- docs/Reproduce-MLPerf-benchmarks/index.md | 6 + .../index.md | 214 --- .../reproduce-mlperf-training-nvidia/index.md | 169 --- .../index.md | 140 -- .../get-ipol-src/index.md | 146 -- .../index.md | 6 + .../process-ae-users/index.md | 136 -- .../reproduce-ipol-paper-2022-439/index.md | 148 -- .../index.md | 179 --- docs/Tests/index.md | 17 + docs/Tests/print-croissant-desc/index.md | 144 -- docs/Tests/print-hello-world-java/index.md | 123 -- docs/Tests/print-hello-world-javac/index.md | 123 -- docs/Tests/print-hello-world-py/index.md | 129 -- docs/Tests/print-hello-world/index.md | 155 -- docs/Tests/print-python-version/index.md | 121 -- docs/Tests/run-python/index.md | 138 -- docs/Tests/test-deps-conditions/index.md | 151 -- .../index.md | 123 -- docs/Tests/test-set-sys-user-cm/index.md | 118 -- 
docs/Tests/upgrade-python-pip/index.md | 123 -- .../create-fpgaconvnet-app-tinyml/index.md | 156 -- .../create-fpgaconvnet-config-tinyml/index.md | 173 --- .../flash-tinyml-binary/index.md | 175 --- docs/TinyML-automation/get-microtvm/index.md | 162 -- .../TinyML-automation/get-zephyr-sdk/index.md | 126 -- docs/TinyML-automation/get-zephyr/index.md | 132 -- docs/TinyML-automation/index.md | 8 + docs/index.md | 52 +- mkdocs.yml | 317 +--- mkdocsHelper.py | 87 -- 281 files changed, 397 insertions(+), 48753 deletions(-) delete mode 100644 docs/AI-ML-datasets/get-croissant/index.md delete mode 100644 docs/AI-ML-datasets/get-dataset-cifar10/index.md delete mode 100644 docs/AI-ML-datasets/get-dataset-cnndm/index.md delete mode 100644 docs/AI-ML-datasets/get-dataset-coco/index.md delete mode 100644 docs/AI-ML-datasets/get-dataset-coco2014/index.md delete mode 100644 docs/AI-ML-datasets/get-dataset-criteo/index.md delete mode 100644 docs/AI-ML-datasets/get-dataset-imagenet-aux/index.md delete mode 100644 docs/AI-ML-datasets/get-dataset-imagenet-calibration/index.md delete mode 100644 docs/AI-ML-datasets/get-dataset-imagenet-helper/index.md delete mode 100644 docs/AI-ML-datasets/get-dataset-imagenet-train/index.md delete mode 100644 docs/AI-ML-datasets/get-dataset-imagenet-val/index.md delete mode 100644 docs/AI-ML-datasets/get-dataset-kits19/index.md delete mode 100644 docs/AI-ML-datasets/get-dataset-librispeech/index.md delete mode 100644 docs/AI-ML-datasets/get-dataset-openimages-annotations/index.md delete mode 100644 docs/AI-ML-datasets/get-dataset-openimages-calibration/index.md delete mode 100644 docs/AI-ML-datasets/get-dataset-openimages/index.md delete mode 100644 docs/AI-ML-datasets/get-dataset-openorca/index.md delete mode 100644 docs/AI-ML-datasets/get-dataset-squad-vocab/index.md delete mode 100644 docs/AI-ML-datasets/get-dataset-squad/index.md delete mode 100644 docs/AI-ML-datasets/get-preprocessed-dataset-criteo/index.md delete mode 100644 
docs/AI-ML-datasets/get-preprocessed-dataset-generic/index.md delete mode 100644 docs/AI-ML-datasets/get-preprocessed-dataset-imagenet/index.md delete mode 100644 docs/AI-ML-datasets/get-preprocessed-dataset-kits19/index.md delete mode 100644 docs/AI-ML-datasets/get-preprocessed-dataset-librispeech/index.md delete mode 100644 docs/AI-ML-datasets/get-preprocessed-dataset-openimages/index.md delete mode 100644 docs/AI-ML-datasets/get-preprocessed-dataset-openorca/index.md delete mode 100644 docs/AI-ML-datasets/get-preprocessed-dataset-squad/index.md create mode 100644 docs/AI-ML-datasets/index.md delete mode 100644 docs/AI-ML-frameworks/get-google-saxml/index.md delete mode 100644 docs/AI-ML-frameworks/get-onnxruntime-prebuilt/index.md delete mode 100644 docs/AI-ML-frameworks/get-qaic-apps-sdk/index.md delete mode 100644 docs/AI-ML-frameworks/get-qaic-platform-sdk/index.md delete mode 100644 docs/AI-ML-frameworks/get-qaic-software-kit/index.md delete mode 100644 docs/AI-ML-frameworks/get-rocm/index.md delete mode 100644 docs/AI-ML-frameworks/get-tvm/index.md create mode 100644 docs/AI-ML-frameworks/index.md delete mode 100644 docs/AI-ML-frameworks/install-qaic-compute-sdk-from-src/index.md delete mode 100644 docs/AI-ML-frameworks/install-rocm/index.md delete mode 100644 docs/AI-ML-frameworks/install-tensorflow-for-c/index.md delete mode 100644 docs/AI-ML-frameworks/install-tensorflow-from-src/index.md delete mode 100644 docs/AI-ML-frameworks/install-tflite-from-src/index.md delete mode 100644 docs/AI-ML-models/convert-ml-model-huggingface-to-onnx/index.md delete mode 100644 docs/AI-ML-models/get-bert-squad-vocab/index.md delete mode 100644 docs/AI-ML-models/get-dlrm/index.md delete mode 100644 docs/AI-ML-models/get-ml-model-3d-unet-kits19/index.md delete mode 100644 docs/AI-ML-models/get-ml-model-bert-base-squad/index.md delete mode 100644 docs/AI-ML-models/get-ml-model-bert-large-squad/index.md delete mode 100644 docs/AI-ML-models/get-ml-model-dlrm-terabyte/index.md 
delete mode 100644 docs/AI-ML-models/get-ml-model-efficientnet-lite/index.md delete mode 100644 docs/AI-ML-models/get-ml-model-gptj/index.md delete mode 100644 docs/AI-ML-models/get-ml-model-huggingface-zoo/index.md delete mode 100644 docs/AI-ML-models/get-ml-model-llama2/index.md delete mode 100644 docs/AI-ML-models/get-ml-model-mobilenet/index.md delete mode 100644 docs/AI-ML-models/get-ml-model-neuralmagic-zoo/index.md delete mode 100644 docs/AI-ML-models/get-ml-model-resnet50/index.md delete mode 100644 docs/AI-ML-models/get-ml-model-retinanet-nvidia/index.md delete mode 100644 docs/AI-ML-models/get-ml-model-retinanet/index.md delete mode 100644 docs/AI-ML-models/get-ml-model-rnnt/index.md delete mode 100644 docs/AI-ML-models/get-ml-model-stable-diffusion/index.md delete mode 100644 docs/AI-ML-models/get-ml-model-tiny-resnet/index.md delete mode 100644 docs/AI-ML-models/get-ml-model-using-imagenet-from-model-zoo/index.md delete mode 100644 docs/AI-ML-models/get-tvm-model/index.md create mode 100644 docs/AI-ML-models/index.md delete mode 100644 docs/AI-ML-optimization/calibrate-model-for.qaic/index.md delete mode 100644 docs/AI-ML-optimization/compile-model-for.qaic/index.md create mode 100644 docs/AI-ML-optimization/index.md delete mode 100644 docs/AI-ML-optimization/prune-bert-models/index.md create mode 100644 docs/CM-Interface/index.md create mode 100644 docs/CM-automation/index.md create mode 100644 docs/CM-interface-prototyping/index.md delete mode 100644 docs/CM-interface-prototyping/test-mlperf-inference-retinanet/index.md delete mode 100644 docs/CUDA-automation/get-cuda-devices/index.md delete mode 100644 docs/CUDA-automation/get-cuda/index.md delete mode 100644 docs/CUDA-automation/get-cudnn/index.md delete mode 100644 docs/CUDA-automation/get-tensorrt/index.md create mode 100644 docs/CUDA-automation/index.md delete mode 100644 docs/CUDA-automation/install-cuda-package-manager/index.md delete mode 100644 
docs/CUDA-automation/install-cuda-prebuilt/index.md delete mode 100644 docs/Cloud-automation/destroy-terraform/index.md delete mode 100644 docs/Cloud-automation/get-aws-cli/index.md delete mode 100644 docs/Cloud-automation/get-terraform/index.md create mode 100644 docs/Cloud-automation/index.md delete mode 100644 docs/Cloud-automation/install-aws-cli/index.md delete mode 100644 docs/Cloud-automation/install-terraform-from-src/index.md delete mode 100644 docs/Cloud-automation/run-terraform/index.md create mode 100644 docs/Collective-benchmarking/index.md delete mode 100644 docs/Collective-benchmarking/launch-benchmark/index.md delete mode 100644 docs/Compiler-automation/get-aocl/index.md delete mode 100644 docs/Compiler-automation/get-cl/index.md delete mode 100644 docs/Compiler-automation/get-compiler-flags/index.md delete mode 100644 docs/Compiler-automation/get-compiler-rust/index.md delete mode 100644 docs/Compiler-automation/get-gcc/index.md delete mode 100644 docs/Compiler-automation/get-go/index.md delete mode 100644 docs/Compiler-automation/get-llvm/index.md create mode 100644 docs/Compiler-automation/index.md delete mode 100644 docs/Compiler-automation/install-gcc-src/index.md delete mode 100644 docs/Compiler-automation/install-ipex-from-src/index.md delete mode 100644 docs/Compiler-automation/install-llvm-prebuilt/index.md delete mode 100644 docs/Compiler-automation/install-llvm-src/index.md delete mode 100644 docs/Compiler-automation/install-onednn-from-src/index.md delete mode 100644 docs/Compiler-automation/install-onnxruntime-from-src/index.md delete mode 100644 docs/Compiler-automation/install-pytorch-from-src/index.md delete mode 100644 docs/Compiler-automation/install-pytorch-kineto-from-src/index.md delete mode 100644 docs/Compiler-automation/install-torchvision-from-src/index.md delete mode 100644 docs/Compiler-automation/install-tpp-pytorch-extension/index.md delete mode 100644 docs/Compiler-automation/install-transformers-from-src/index.md 
create mode 100644 docs/Dashboard-automation/index.md delete mode 100644 docs/Dashboard-automation/publish-results-to-dashboard/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-android-sdk/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-aria2/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-bazel/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-blis/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-brew/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-cmake/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-cmsis_5/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-docker/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-generic-sys-util/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-google-test/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-java/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-javac/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-lib-armnn/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-lib-dnnl/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-lib-protobuf/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-lib-qaic-api/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-nvidia-docker/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-openssl/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-rclone/index.md delete mode 100644 
docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-cm/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-min/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-xilinx-sdk/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/get-zendnn/index.md create mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/install-bazel/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/install-cmake-prebuilt/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/install-gflags/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/install-github-cli/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/install-numactl-from-src/index.md delete mode 100644 docs/Detection-or-installation-of-tools-and-artifacts/install-openssl/index.md delete mode 100644 docs/DevOps-automation/benchmark-program/index.md delete mode 100644 docs/DevOps-automation/compile-program/index.md delete mode 100644 docs/DevOps-automation/convert-csv-to-md/index.md delete mode 100644 docs/DevOps-automation/copy-to-clipboard/index.md delete mode 100644 docs/DevOps-automation/create-conda-env/index.md delete mode 100644 docs/DevOps-automation/create-patch/index.md delete mode 100644 docs/DevOps-automation/detect-sudo/index.md delete mode 100644 docs/DevOps-automation/download-and-extract/index.md delete mode 100644 docs/DevOps-automation/download-file/index.md delete mode 100644 docs/DevOps-automation/download-torrent/index.md delete mode 100644 docs/DevOps-automation/extract-file/index.md delete mode 100644 docs/DevOps-automation/fail/index.md delete mode 100644 docs/DevOps-automation/get-conda/index.md delete mode 100644 docs/DevOps-automation/get-git-repo/index.md delete mode 100644 
docs/DevOps-automation/get-github-cli/index.md create mode 100644 docs/DevOps-automation/index.md delete mode 100644 docs/DevOps-automation/pull-git-repo/index.md delete mode 100644 docs/DevOps-automation/push-csv-to-spreadsheet/index.md delete mode 100644 docs/DevOps-automation/set-device-settings-qaic/index.md delete mode 100644 docs/DevOps-automation/set-echo-off-win/index.md delete mode 100644 docs/DevOps-automation/set-performance-mode/index.md delete mode 100644 docs/DevOps-automation/set-sqlite-dir/index.md delete mode 100644 docs/DevOps-automation/tar-my-folder/index.md delete mode 100644 docs/Docker-automation/build-docker-image/index.md delete mode 100644 docs/Docker-automation/build-dockerfile/index.md create mode 100644 docs/Docker-automation/index.md delete mode 100644 docs/Docker-automation/prune-docker/index.md delete mode 100644 docs/Docker-automation/run-docker-container/index.md delete mode 100644 docs/GUI/gui/index.md create mode 100644 docs/GUI/index.md delete mode 100644 docs/Legacy-CK-support/get-ck-repo-mlops/index.md delete mode 100644 docs/Legacy-CK-support/get-ck/index.md create mode 100644 docs/Legacy-CK-support/index.md delete mode 100644 docs/MLPerf-benchmark-support/add-custom-nvidia-system/index.md delete mode 100644 docs/MLPerf-benchmark-support/benchmark-any-mlperf-inference-implementation/index.md delete mode 100644 docs/MLPerf-benchmark-support/build-mlperf-inference-server-nvidia/index.md delete mode 100644 docs/MLPerf-benchmark-support/generate-mlperf-inference-submission/index.md delete mode 100644 docs/MLPerf-benchmark-support/generate-mlperf-inference-user-conf/index.md delete mode 100644 docs/MLPerf-benchmark-support/generate-mlperf-tiny-report/index.md delete mode 100644 docs/MLPerf-benchmark-support/generate-mlperf-tiny-submission/index.md delete mode 100644 docs/MLPerf-benchmark-support/generate-nvidia-engine/index.md delete mode 100644 docs/MLPerf-benchmark-support/get-mlperf-inference-intel-scratch-space/index.md delete 
mode 100644 docs/MLPerf-benchmark-support/get-mlperf-inference-loadgen/index.md delete mode 100644 docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-common-code/index.md delete mode 100644 docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-scratch-space/index.md delete mode 100644 docs/MLPerf-benchmark-support/get-mlperf-inference-results-dir/index.md delete mode 100644 docs/MLPerf-benchmark-support/get-mlperf-inference-results/index.md delete mode 100644 docs/MLPerf-benchmark-support/get-mlperf-inference-src/index.md delete mode 100644 docs/MLPerf-benchmark-support/get-mlperf-inference-submission-dir/index.md delete mode 100644 docs/MLPerf-benchmark-support/get-mlperf-inference-sut-configs/index.md delete mode 100644 docs/MLPerf-benchmark-support/get-mlperf-inference-sut-description/index.md delete mode 100644 docs/MLPerf-benchmark-support/get-mlperf-logging/index.md delete mode 100644 docs/MLPerf-benchmark-support/get-mlperf-power-dev/index.md delete mode 100644 docs/MLPerf-benchmark-support/get-mlperf-tiny-eembc-energy-runner-src/index.md delete mode 100644 docs/MLPerf-benchmark-support/get-mlperf-tiny-src/index.md delete mode 100644 docs/MLPerf-benchmark-support/get-mlperf-training-nvidia-code/index.md delete mode 100644 docs/MLPerf-benchmark-support/get-mlperf-training-src/index.md delete mode 100644 docs/MLPerf-benchmark-support/get-nvidia-mitten/index.md delete mode 100644 docs/MLPerf-benchmark-support/get-spec-ptd/index.md delete mode 100644 docs/MLPerf-benchmark-support/import-mlperf-inference-to-experiment/index.md delete mode 100644 docs/MLPerf-benchmark-support/import-mlperf-tiny-to-experiment/index.md delete mode 100644 docs/MLPerf-benchmark-support/import-mlperf-training-to-experiment/index.md create mode 100644 docs/MLPerf-benchmark-support/index.md delete mode 100644 docs/MLPerf-benchmark-support/install-mlperf-logging-from-src/index.md delete mode 100644 docs/MLPerf-benchmark-support/prepare-training-data-bert/index.md delete mode 
100644 docs/MLPerf-benchmark-support/prepare-training-data-resnet/index.md delete mode 100644 docs/MLPerf-benchmark-support/preprocess-mlperf-inference-submission/index.md delete mode 100644 docs/MLPerf-benchmark-support/process-mlperf-accuracy/index.md delete mode 100644 docs/MLPerf-benchmark-support/push-mlperf-inference-results-to-github/index.md delete mode 100644 docs/MLPerf-benchmark-support/run-all-mlperf-models/index.md delete mode 100644 docs/MLPerf-benchmark-support/run-mlperf-inference-mobilenet-models/index.md delete mode 100644 docs/MLPerf-benchmark-support/run-mlperf-inference-submission-checker/index.md delete mode 100644 docs/MLPerf-benchmark-support/run-mlperf-power-client/index.md delete mode 100644 docs/MLPerf-benchmark-support/run-mlperf-power-server/index.md delete mode 100644 docs/MLPerf-benchmark-support/run-mlperf-training-submission-checker/index.md delete mode 100644 docs/MLPerf-benchmark-support/truncate-mlperf-inference-accuracy-log/index.md delete mode 100644 docs/Modular-AI-ML-application-pipeline/app-image-classification-onnx-py/index.md delete mode 100644 docs/Modular-AI-ML-application-pipeline/app-image-classification-tf-onnx-cpp/index.md delete mode 100644 docs/Modular-AI-ML-application-pipeline/app-image-classification-torch-py/index.md delete mode 100644 docs/Modular-AI-ML-application-pipeline/app-image-classification-tvm-onnx-py/index.md delete mode 100644 docs/Modular-AI-ML-application-pipeline/app-stable-diffusion-onnx-py/index.md create mode 100644 docs/Modular-AI-ML-application-pipeline/index.md delete mode 100644 docs/Modular-MLPerf-benchmarks/app-mlperf-inference-dummy/index.md delete mode 100644 docs/Modular-MLPerf-benchmarks/app-mlperf-inference-intel/index.md delete mode 100644 docs/Modular-MLPerf-benchmarks/app-mlperf-inference-qualcomm/index.md create mode 100644 docs/Modular-MLPerf-benchmarks/index.md delete mode 100644 docs/Modular-MLPerf-inference-benchmark-pipeline/app-loadgen-generic-python/index.md delete mode 
100644 docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-ctuning-cpp-tflite/index.md delete mode 100644 docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-cpp/index.md delete mode 100644 docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-python/index.md delete mode 100644 docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference/index.md delete mode 100644 docs/Modular-MLPerf-inference-benchmark-pipeline/benchmark-program-mlperf/index.md create mode 100644 docs/Modular-MLPerf-inference-benchmark-pipeline/index.md delete mode 100644 docs/Modular-MLPerf-inference-benchmark-pipeline/run-mlperf-inference-app/index.md delete mode 100644 docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-nvidia/index.md delete mode 100644 docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-reference/index.md create mode 100644 docs/Modular-MLPerf-training-benchmark-pipeline/index.md delete mode 100644 docs/Modular-application-pipeline/app-image-corner-detection/index.md create mode 100644 docs/Modular-application-pipeline/index.md delete mode 100644 docs/Platform-information/detect-cpu/index.md delete mode 100644 docs/Platform-information/detect-os/index.md create mode 100644 docs/Platform-information/index.md delete mode 100644 docs/Python-automation/activate-python-venv/index.md delete mode 100644 docs/Python-automation/get-generic-python-lib/index.md delete mode 100644 docs/Python-automation/get-python3/index.md create mode 100644 docs/Python-automation/index.md delete mode 100644 docs/Python-automation/install-generic-conda-package/index.md delete mode 100644 docs/Python-automation/install-python-src/index.md delete mode 100644 docs/Python-automation/install-python-venv/index.md create mode 100644 docs/Remote-automation/index.md delete mode 100644 docs/Remote-automation/remote-run-commands/index.md delete mode 100644 
docs/Reproduce-MLPerf-benchmarks/app-mlperf-inference-nvidia/index.md create mode 100644 docs/Reproduce-MLPerf-benchmarks/index.md delete mode 100644 docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-octoml-tinyml-results/index.md delete mode 100644 docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-training-nvidia/index.md delete mode 100644 docs/Reproduce-MLPerf-benchmarks/wrapper-reproduce-octoml-tinyml-submission/index.md delete mode 100644 docs/Reproducibility-and-artifact-evaluation/get-ipol-src/index.md create mode 100644 docs/Reproducibility-and-artifact-evaluation/index.md delete mode 100644 docs/Reproducibility-and-artifact-evaluation/process-ae-users/index.md delete mode 100644 docs/Reproducibility-and-artifact-evaluation/reproduce-ipol-paper-2022-439/index.md delete mode 100644 docs/Reproducibility-and-artifact-evaluation/reproduce-micro-paper-2023-victima/index.md create mode 100644 docs/Tests/index.md delete mode 100644 docs/Tests/print-croissant-desc/index.md delete mode 100644 docs/Tests/print-hello-world-java/index.md delete mode 100644 docs/Tests/print-hello-world-javac/index.md delete mode 100644 docs/Tests/print-hello-world-py/index.md delete mode 100644 docs/Tests/print-hello-world/index.md delete mode 100644 docs/Tests/print-python-version/index.md delete mode 100644 docs/Tests/run-python/index.md delete mode 100644 docs/Tests/test-deps-conditions/index.md delete mode 100644 docs/Tests/test-download-and-extract-artifacts/index.md delete mode 100644 docs/Tests/test-set-sys-user-cm/index.md delete mode 100644 docs/Tests/upgrade-python-pip/index.md delete mode 100644 docs/TinyML-automation/create-fpgaconvnet-app-tinyml/index.md delete mode 100644 docs/TinyML-automation/create-fpgaconvnet-config-tinyml/index.md delete mode 100644 docs/TinyML-automation/flash-tinyml-binary/index.md delete mode 100644 docs/TinyML-automation/get-microtvm/index.md delete mode 100644 docs/TinyML-automation/get-zephyr-sdk/index.md delete mode 100644 
docs/TinyML-automation/get-zephyr/index.md create mode 100644 docs/TinyML-automation/index.md delete mode 100644 mkdocsHelper.py diff --git a/docs/AI-ML-datasets/get-croissant/index.md b/docs/AI-ML-datasets/get-croissant/index.md deleted file mode 100644 index 3c62b3bc5..000000000 --- a/docs/AI-ML-datasets/get-croissant/index.md +++ /dev/null @@ -1,126 +0,0 @@ -Automatically generated README for this automation recipe: **get-croissant** - -Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-croissant,8fd653eac8da4c14) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *get,mlcommons,croissant* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get mlcommons croissant" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,mlcommons,croissant` - -`cm run script --tags=get,mlcommons,croissant ` - -*or* - -`cmr "get mlcommons croissant"` - -`cmr "get mlcommons croissant " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,mlcommons,croissant' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,mlcommons,croissant"``` - -#### Run this script via Docker (beta) - -`cm docker script "get mlcommons croissant" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,python3 - * CM names: `--adr.['python3', 'python']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * git,repo,_repo.https://github.com/mlcommons/croissant - * CM names: `--adr.['git-mlcommons-croissant']...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant/_cm.yaml) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-croissant/_cm.yaml) - -___ -### Script output -`cmr "get mlcommons croissant " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-datasets/get-dataset-cifar10/index.md b/docs/AI-ML-datasets/get-dataset-cifar10/index.md deleted file mode 100644 index e6caa091c..000000000 --- a/docs/AI-ML-datasets/get-dataset-cifar10/index.md +++ /dev/null @@ -1,164 +0,0 @@ -Automatically generated README for this automation recipe: **get-dataset-cifar10** - -Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-cifar10,2f0c0bb3663b4ed7) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,dataset,cifar10,image-classification,validation,training* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get dataset cifar10 image-classification validation training" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,dataset,cifar10,image-classification,validation,training` - -`cm run script --tags=get,dataset,cifar10,image-classification,validation,training[,variations] ` - -*or* - -`cmr "get dataset cifar10 image-classification validation training"` - -`cmr "get dataset cifar10 image-classification validation training [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,dataset,cifar10,image-classification,validation,training' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,dataset,cifar10,image-classification,validation,training"``` - -#### Run this script via Docker (beta) - -`cm docker script "get dataset cifar10 image-classification validation training[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_tiny` - - Environment variables: - - *CM_DATASET_CONVERT_TO_TINYMLPERF*: `yes` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,tinymlperf,src - - CM script: [get-mlperf-tiny-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-tiny-src) - * get,src,eembc,energy-runner - - CM script: [get-mlperf-tiny-eembc-energy-runner-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-tiny-eembc-energy-runner-src) - -
    - - - * Group "**data_format**" -
    - Click here to expand this section. - - * **`_python`** (default) - - Environment variables: - - *CM_DATASET*: `CIFAR10` - - *CM_DATASET_FILENAME*: `cifar-10-python.tar.gz` - - *CM_DATASET_FILENAME1*: `cifar-10-python.tar` - - *CM_DATASET_CIFAR10*: `https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz` - - Workflow: - -
    - - -#### Default variations - -`_python` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cifar10/_cm.json) - -___ -### Script output -`cmr "get dataset cifar10 image-classification validation training [,variations]" -j` -#### New environment keys (filter) - -* `CM_DATASET_*` -#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-datasets/get-dataset-cnndm/index.md b/docs/AI-ML-datasets/get-dataset-cnndm/index.md deleted file mode 100644 index 85be98b6a..000000000 --- a/docs/AI-ML-datasets/get-dataset-cnndm/index.md +++ /dev/null @@ -1,175 +0,0 @@ -Automatically generated README for this automation recipe: **get-dataset-cnndm** - -Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-cnndm,aed298c156e24257) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,dataset,gpt-j,cnndm,cnn-dailymail,original* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get dataset gpt-j cnndm cnn-dailymail original" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,dataset,gpt-j,cnndm,cnn-dailymail,original` - -`cm run script --tags=get,dataset,gpt-j,cnndm,cnn-dailymail,original[,variations] ` - -*or* - -`cmr "get dataset gpt-j cnndm cnn-dailymail original"` - -`cmr "get dataset gpt-j cnndm cnn-dailymail original [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,dataset,gpt-j,cnndm,cnn-dailymail,original' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,dataset,gpt-j,cnndm,cnn-dailymail,original"``` - -#### Run this script via Docker (beta) - -`cm docker script "get dataset gpt-j cnndm cnn-dailymail original[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_intel` - - Workflow: - * `_intel,validation` - - Environment variables: - - *CM_CNNDM_INTEL_VARIATION*: `yes` - - Workflow: - -
    - - - * Group "**dataset-type**" -
    - Click here to expand this section. - - * `_calibration` - - Environment variables: - - *CM_DATASET_CALIBRATION*: `yes` - - Workflow: - * **`_validation`** (default) - - Environment variables: - - *CM_DATASET_CALIBRATION*: `no` - - Workflow: - -
    - - -#### Default variations - -`_validation` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_DATASET_CALIBRATION: `no` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm/_cm.json)*** - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * mlperf,inference,source - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_CNNDM_INTEL_VARIATION': ['yes']}` - * CM names: `--adr.['inference-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * get,generic-python-lib,_package.simplejson - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_datasets - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.tokenizers - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_numpy - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm/_cm.json) - 1. ***Run native script if exists*** - * [run-intel.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm/run-intel.sh) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-cnndm/_cm.json) - -___ -### Script output -`cmr "get dataset gpt-j cnndm cnn-dailymail original [,variations]" -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-datasets/get-dataset-coco/index.md b/docs/AI-ML-datasets/get-dataset-coco/index.md deleted file mode 100644 index 33aded32e..000000000 --- a/docs/AI-ML-datasets/get-dataset-coco/index.md +++ /dev/null @@ -1,215 +0,0 @@ -Automatically generated README for this automation recipe: **get-dataset-coco** - -Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-coco,c198e1f60ac6445c) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,dataset,object-detection,coco* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get dataset object-detection coco" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,dataset,object-detection,coco` - -`cm run script --tags=get,dataset,object-detection,coco[,variations] [--input_flags]` - -*or* - -`cmr "get dataset object-detection coco"` - -`cmr "get dataset object-detection coco [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,dataset,object-detection,coco' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,dataset,object-detection,coco"``` - -#### Run this script via Docker (beta) - -`cm docker script "get dataset object-detection coco[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * Group "**size**" -
    - Click here to expand this section. - - * **`_complete`** (default) - - Environment variables: - - *CM_DATASET_COCO_SIZE*: `complete` - - Workflow: - * `_small` - - Environment variables: - - *CM_DATASET_COCO_SIZE*: `small` - - Workflow: - -
    - - - * Group "**type**" -
    - Click here to expand this section. - - * `_train` - - Environment variables: - - *CM_DATASET_COCO_TYPE*: `train` - - Workflow: - * **`_val`** (default) - - Environment variables: - - *CM_DATASET_COCO_TYPE*: `val` - - Workflow: - -
    - - - * Group "**version**" -
    - Click here to expand this section. - - * **`_2017`** (default) - - Environment variables: - - *CM_DATASET_COCO_VERSION*: `2017` - - Workflow: - -
    - - -#### Default variations - -`_2017,_complete,_val` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--from=value` → `CM_FROM=value` -* `--home=value` → `CM_HOME_DIR=value` -* `--store=value` → `CM_STORE=value` -* `--to=value` → `CM_TO=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "from":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco/_cm.json)*** - * download-and-extract,file,_wget,_extract - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_DATASET_COCO_DETECTED': ['yes']}` - * CM names: `--adr.['get-dataset-coco-data', '746e5dad5e784ad6']...` - - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) - * download-and-extract,file,_wget,_extract - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_DATASET_COCO_DETECTED': ['yes']}` - * CM names: `--adr.['get-dataset-coco-annotations', 'edb6cd092ff64171']...` - - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco/_cm.json) - -___ -### Script output -`cmr "get dataset object-detection coco [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_DATASET_COCO*` -* `CM_DATASET_PATH` -* `CM_DATASET_PATH_ROOT` -#### New environment keys auto-detected from customize - -* `CM_DATASET_COCO_ANNOTATIONS_PATH` -* `CM_DATASET_COCO_DATA_PATH` -* `CM_DATASET_COCO_DETECTED` -* `CM_DATASET_COCO_MD5SUM_ANN` -* `CM_DATASET_COCO_MD5SUM_DATA` -* `CM_DATASET_COCO_PATH` -* `CM_DATASET_COCO_TYPE` -* `CM_DATASET_COCO_TYPE_AND_VERSION` -* `CM_DATASET_COCO_URL_ANNOTATIONS_FULL` -* `CM_DATASET_COCO_URL_DATA_FULL` -* `CM_DATASET_COCO_VERSION` -* `CM_DATASET_PATH` -* `CM_DATASET_PATH_ROOT` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-coco2014/index.md b/docs/AI-ML-datasets/get-dataset-coco2014/index.md deleted file mode 100644 index e13dc04fe..000000000 --- a/docs/AI-ML-datasets/get-dataset-coco2014/index.md +++ /dev/null @@ -1,204 +0,0 @@ -Automatically generated README for this automation recipe: **get-dataset-coco2014** - -Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-coco2014,3f7ad9d42f4040f8) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *get,dataset,coco2014,object-detection,original* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get dataset coco2014 object-detection original" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,dataset,coco2014,object-detection,original` - -`cm run script --tags=get,dataset,coco2014,object-detection,original[,variations] ` - -*or* - -`cmr "get dataset coco2014 object-detection original"` - -`cmr "get dataset coco2014 object-detection original [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,dataset,coco2014,object-detection,original' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,dataset,coco2014,object-detection,original"``` - -#### Run this script via Docker (beta) - -`cm docker script "get dataset coco2014 object-detection original[variations]" ` - -___ -### Customization - - -#### Variations - - * Group "**annotations**" -
    - Click here to expand this section. - - * `_custom-annotations` - - Environment variables: - - *CM_DATASET_COCO2014_CUSTOM_ANNOTATIONS*: `yes` - - Workflow: - * **`_default-annotations`** (default) - - Environment variables: - - *CM_DATASET_COCO2014_CUSTOM_ANNOTATIONS*: `no` - - Workflow: - -
    - - - * Group "**dataset-type**" -
    - Click here to expand this section. - - * `_calibration` - - Environment variables: - - *CM_DATASET_CALIBRATION*: `yes` - - Workflow: - * **`_validation`** (default) - - Environment variables: - - *CM_DATASET_CALIBRATION*: `no` - - Workflow: - -
    - - - * Group "**size**" -
    - Click here to expand this section. - - * **`_50`** (default) - - Environment variables: - - *CM_DATASET_SIZE*: `50` - - Workflow: - * `_500` - - Environment variables: - - *CM_DATASET_SIZE*: `500` - - Workflow: - * `_full` - - Environment variables: - - *CM_DATASET_SIZE*: `` - - Workflow: - * `_size.#` - - Environment variables: - - *CM_DATASET_SIZE*: `#` - - Workflow: - -
    - - -#### Default variations - -`_50,_default-annotations,_validation` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_DATASET_CALIBRATION: `no` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014/_cm.yaml)*** - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,generic-python-lib,_package.tqdm - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * mlperf,inference,source - * CM names: `--adr.['inference-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014/_cm.yaml) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014/run.sh) - 1. ***Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014/_cm.yaml)*** - * get,coco2014,annotations - * Enable this dependency only if all ENV vars are set:
    -`{'CM_DATASET_COCO2014_CUSTOM_ANNOTATIONS': ['yes']}` - - *Warning: no scripts found* - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-coco2014/_cm.yaml) - -___ -### Script output -`cmr "get dataset coco2014 object-detection original [,variations]" -j` -#### New environment keys (filter) - -* `CM_CALIBRATION_DATASET_PATH` -* `CM_DATASET_ANNOTATIONS_DIR_PATH` -* `CM_DATASET_ANNOTATIONS_FILE_PATH` -* `CM_DATASET_PATH` -* `CM_DATASET_PATH_ROOT` -#### New environment keys auto-detected from customize - -* `CM_CALIBRATION_DATASET_PATH` -* `CM_DATASET_PATH` -* `CM_DATASET_PATH_ROOT` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-criteo/index.md b/docs/AI-ML-datasets/get-dataset-criteo/index.md deleted file mode 100644 index 5f2b29d83..000000000 --- a/docs/AI-ML-datasets/get-dataset-criteo/index.md +++ /dev/null @@ -1,154 +0,0 @@ -Automatically generated README for this automation recipe: **get-dataset-criteo** - -Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-criteo,194a47d908714897) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-criteo)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): 
*get,dataset,criteo,original* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get dataset criteo original" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,dataset,criteo,original` - -`cm run script --tags=get,dataset,criteo,original[,variations] [--input_flags]` - -*or* - -`cmr "get dataset criteo original"` - -`cmr "get dataset criteo original [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,dataset,criteo,original' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,dataset,criteo,original"``` - -#### Run this script via Docker (beta) - -`cm docker script "get dataset criteo original[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_backup` - - Environment variables: - - *CM_BACKUP_ZIPS*: `yes` - - Workflow: - * `_fake` - - Environment variables: - - *CM_CRITEO_FAKE*: `yes` - - Workflow: - -
    - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--criteo_path=value` → `CM_CRITEO_PATH=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "criteo_path":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_BACKUP_ZIPS: `no` - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-criteo/_cm.json) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-criteo/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-criteo/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-criteo/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-criteo/_cm.json) - -___ -### Script output -`cmr "get dataset criteo original [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_DATASET*` -#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-datasets/get-dataset-imagenet-aux/index.md b/docs/AI-ML-datasets/get-dataset-imagenet-aux/index.md deleted file mode 100644 index e5d3a126c..000000000 --- a/docs/AI-ML-datasets/get-dataset-imagenet-aux/index.md +++ /dev/null @@ -1,155 +0,0 @@ -Automatically generated README for this automation recipe: **get-dataset-imagenet-aux** - -Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-imagenet-aux,bb2c6dd8c8c64217) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-aux)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,aux,dataset-aux,image-classification,imagenet-aux* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get aux dataset-aux image-classification imagenet-aux" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,aux,dataset-aux,image-classification,imagenet-aux` - -`cm run script --tags=get,aux,dataset-aux,image-classification,imagenet-aux[,variations] ` - -*or* - -`cmr "get aux dataset-aux image-classification imagenet-aux"` - -`cmr "get aux dataset-aux image-classification imagenet-aux [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,aux,dataset-aux,image-classification,imagenet-aux' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,aux,dataset-aux,image-classification,imagenet-aux"``` - -#### Run this script via Docker (beta) - -`cm docker script "get aux dataset-aux image-classification imagenet-aux[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_2012` - - Environment variables: - - *CM_DATASET_AUX_VER*: `2012` - - Workflow: - -
    - - - * Group "**download-source**" -
    - Click here to expand this section. - - * `_from.berkeleyvision` - - Environment variables: - - *CM_WGET_URL*: `http://dl.caffe.berkeleyvision.org/caffe_ilsvrc12.tar.gz` - - Workflow: - * **`_from.dropbox`** (default) - - Environment variables: - - *CM_WGET_URL*: `https://www.dropbox.com/s/92n2fyej3lzy3s3/caffe_ilsvrc12.tar.gz` - - Workflow: - -
    - - -#### Default variations - -`_from.dropbox` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-aux/_cm.json) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-aux/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-aux/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-aux/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-aux/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-aux/_cm.json) - -___ -### Script output -`cmr "get aux dataset-aux image-classification imagenet-aux [,variations]" -j` -#### New environment keys (filter) - -* `CM_DATASET_AUX_*` -#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-datasets/get-dataset-imagenet-calibration/index.md b/docs/AI-ML-datasets/get-dataset-imagenet-calibration/index.md deleted file mode 100644 index 76ae3ca52..000000000 --- a/docs/AI-ML-datasets/get-dataset-imagenet-calibration/index.md +++ /dev/null @@ -1,146 +0,0 @@ -Automatically generated README for this automation recipe: **get-dataset-imagenet-calibration** - -Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-imagenet-calibration,30361fad3dff49ff) ]* - ---- -#### Summary - -* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-calibration)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *get,dataset,imagenet,calibration* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get dataset imagenet calibration" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,dataset,imagenet,calibration` - -`cm run script --tags=get,dataset,imagenet,calibration[,variations] ` - -*or* - -`cmr "get dataset imagenet calibration"` - -`cmr "get dataset imagenet calibration [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,dataset,imagenet,calibration' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,dataset,imagenet,calibration"``` - -#### Run this script via Docker (beta) - -`cm docker script "get dataset imagenet calibration[variations]" ` - -___ -### Customization - - -#### Variations - - * Group "**calibration-option**" -
    - Click here to expand this section. - - * **`_mlperf.option1`** (default) - - Environment variables: - - *CM_MLPERF_IMAGENET_CALIBRATION_OPTION*: `one` - - *CM_DOWNLOAD_CHECKSUM*: `f09719174af3553119e2c621157773a6` - - Workflow: - * `_mlperf.option2` - - Environment variables: - - *CM_MLPERF_IMAGENET_CALIBRATION_OPTION*: `two` - - *CM_DOWNLOAD_CHECKSUM*: `e44582af00e3b4fc3fac30efd6bdd05f` - - Workflow: - -
    - - -#### Default variations - -`_mlperf.option1` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-calibration/_cm.yaml)*** - * download,file - * CM names: `--adr.['calibration-file-downloader']...` - - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-calibration/_cm.yaml) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-calibration/_cm.yaml) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-calibration/_cm.yaml) - -___ -### Script output -`cmr "get dataset imagenet calibration [,variations]" -j` -#### New environment keys (filter) - -* `CM_MLPERF_IMAGENET_CALIBRATION_LIST_FILE_WITH_PATH` -#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-datasets/get-dataset-imagenet-helper/index.md b/docs/AI-ML-datasets/get-dataset-imagenet-helper/index.md deleted file mode 100644 index 6ce0dc22e..000000000 --- a/docs/AI-ML-datasets/get-dataset-imagenet-helper/index.md +++ /dev/null @@ -1,120 +0,0 @@ -Automatically generated README for this automation recipe: **get-dataset-imagenet-helper** - -Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-imagenet-helper,a6c3c321d07742f9) ]* - ---- -#### Summary - -* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-helper)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,imagenet,helper,imagenet-helper* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get imagenet helper imagenet-helper" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,imagenet,helper,imagenet-helper` - -`cm run script --tags=get,imagenet,helper,imagenet-helper ` - -*or* - -`cmr "get imagenet helper imagenet-helper"` - -`cmr "get imagenet helper imagenet-helper " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,imagenet,helper,imagenet-helper' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,imagenet,helper,imagenet-helper"``` - -#### Run this script via Docker (beta) - -`cm docker script "get imagenet helper imagenet-helper" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-helper/_cm.json) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-helper/_cm.json) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-helper/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-helper/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-helper/_cm.json) - -___ -### Script output -`cmr "get imagenet helper imagenet-helper " -j` -#### New environment keys (filter) - -* `+PYTHONPATH` -* `CM_DATASET_IMAGENET_HELPER_PATH` -#### New environment keys auto-detected from customize - -* `CM_DATASET_IMAGENET_HELPER_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-imagenet-train/index.md b/docs/AI-ML-datasets/get-dataset-imagenet-train/index.md deleted file mode 100644 index a6c7feb9f..000000000 --- a/docs/AI-ML-datasets/get-dataset-imagenet-train/index.md +++ /dev/null @@ -1,149 +0,0 @@ -Automatically generated README for this automation recipe: **get-dataset-imagenet-train** - -Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-imagenet-train,2bec165da5cc4ebf) ]* - ---- -#### Summary - -* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-train)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,imagenet,train,dataset,original* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get imagenet train dataset original" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,imagenet,train,dataset,original` - -`cm run script --tags=get,imagenet,train,dataset,original [--input_flags]` - -*or* - -`cmr "get imagenet train dataset original"` - -`cmr "get imagenet train dataset original " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,imagenet,train,dataset,original' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,imagenet,train,dataset,original"``` - -#### Run this script via Docker (beta) - -`cm docker script "get imagenet train dataset original" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--input=value` → `IMAGENET_TRAIN_PATH=value` -* `--torrent=value` → `CM_DATASET_IMAGENET_TRAIN_TORRENT_PATH=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "input":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-train/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-train/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-train/_cm.json)*** - * download-and-extract,file,_extract - * Enable this dependency only if all ENV vars are set:
    -`{'CM_DATASET_IMAGENET_VAL_REQUIRE_DAE': ['yes', 'True']}` - - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) - * file,extract - * Enable this dependency only if all ENV vars are set:
    -`{'CM_DAE_ONLY_EXTRACT': ['yes', 'True']}` - - CM script: [extract-file](https://github.com/mlcommons/cm4mlops/tree/master/script/extract-file) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-train/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-train/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-train/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-train/_cm.json) - -___ -### Script output -`cmr "get imagenet train dataset original " [--input_flags] -j` -#### New environment keys (filter) - -* `CM_DATASET_IMAGENET_*` -* `CM_DATASET_PATH` -#### New environment keys auto-detected from customize - -* `CM_DATASET_IMAGENET_PATH` -* `CM_DATASET_IMAGENET_TRAIN_PATH` -* `CM_DATASET_IMAGENET_TRAIN_REQUIRE_DAE` -* `CM_DATASET_IMAGENET_VAL_REQUIRE_DAE` -* `CM_DATASET_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-imagenet-val/index.md b/docs/AI-ML-datasets/get-dataset-imagenet-val/index.md deleted file mode 100644 index 09c78b485..000000000 --- a/docs/AI-ML-datasets/get-dataset-imagenet-val/index.md +++ /dev/null @@ -1,211 +0,0 @@ -Automatically generated README for this automation recipe: **get-dataset-imagenet-val** - -Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-imagenet-val,7afd58d287fe4f11) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub 
repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-val)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,val,validation,dataset,imagenet,ILSVRC,image-classification,original* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get val validation dataset imagenet ILSVRC image-classification original" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,val,validation,dataset,imagenet,ILSVRC,image-classification,original` - -`cm run script --tags=get,val,validation,dataset,imagenet,ILSVRC,image-classification,original[,variations] [--input_flags]` - -*or* - -`cmr "get val validation dataset imagenet ILSVRC image-classification original"` - -`cmr "get val validation dataset imagenet ILSVRC image-classification original [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,val,validation,dataset,imagenet,ILSVRC,image-classification,original' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,val,validation,dataset,imagenet,ILSVRC,image-classification,original"``` - -#### Run this script via Docker (beta) - -`cm docker script "get val validation dataset imagenet ILSVRC image-classification original[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_2012-500` - - Workflow: - * `_2012-full` - - Workflow: - * `_run-during-docker-build` - - Workflow: - -
    - - - * Group "**count**" -
    - Click here to expand this section. - - * `_full` - - Environment variables: - - *CM_DATASET_SIZE*: `50000` - - *CM_IMAGENET_FULL*: `yes` - - *CM_DAE_FILENAME*: `ILSVRC2012_img_val.tar` - - *CM_DAE_DOWNLOADED_CHECKSUM*: `29b22e2961454d5413ddabcf34fc5622` - - Workflow: - * `_size.#` - - Environment variables: - - *CM_DATASET_SIZE*: `#` - - Workflow: - * **`_size.500`** (default) - - Environment variables: - - *CM_DATASET_SIZE*: `500` - - *CM_DAE_FILENAME*: `ILSVRC2012_img_val_500.tar` - - *CM_DAE_URL*: `http://cKnowledge.org/ai/data/ILSVRC2012_img_val_500.tar` - - Workflow: - -
    - - - * Group "**dataset-version**" -
    - Click here to expand this section. - - * **`_2012`** (default) - - Environment variables: - - *CM_DATASET_VER*: `2012` - - Workflow: - -
    - - -#### Default variations - -`_2012,_size.500` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--imagenet_path=value` → `IMAGENET_PATH=value` -* `--torrent=value` → `CM_DATASET_IMAGENET_VAL_TORRENT_PATH=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "imagenet_path":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-val/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-val/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-val/_cm.json)*** - * download-and-extract,file,_extract - * Enable this dependency only if all ENV vars are set:
    -`{'CM_DATASET_IMAGENET_VAL_REQUIRE_DAE': ['yes', 'True']}` - - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) - * file,extract,_no-remove-extracted - * Enable this dependency only if all ENV vars are set:
    -`{'CM_DAE_ONLY_EXTRACT': ['yes', 'True']}` - - CM script: [extract-file](https://github.com/mlcommons/cm4mlops/tree/master/script/extract-file) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-val/run.bat) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-val/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-val/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-imagenet-val/_cm.json) - -___ -### Script output -`cmr "get val validation dataset imagenet ILSVRC image-classification original [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_DATASET_IMAGENET_PATH` -* `CM_DATASET_IMAGENET_VAL_PATH` -* `CM_DATASET_PATH` -* `CM_DATASET_SIZE` -* `CM_DATASET_VER` -#### New environment keys auto-detected from customize - -* `CM_DATASET_IMAGENET_PATH` -* `CM_DATASET_IMAGENET_VAL_PATH` -* `CM_DATASET_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-kits19/index.md b/docs/AI-ML-datasets/get-dataset-kits19/index.md deleted file mode 100644 index 53f222b56..000000000 --- a/docs/AI-ML-datasets/get-dataset-kits19/index.md +++ /dev/null @@ -1,172 +0,0 @@ -Automatically generated README for this automation recipe: **get-dataset-kits19** - -Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-kits19,79992bb221024ac5) ]* - ---- -#### Summary - -* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-kits19)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,dataset,medical-imaging,kits,original,kits19* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get dataset medical-imaging kits original kits19" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,dataset,medical-imaging,kits,original,kits19` - -`cm run script --tags=get,dataset,medical-imaging,kits,original,kits19[,variations] ` - -*or* - -`cmr "get dataset medical-imaging kits original kits19"` - -`cmr "get dataset medical-imaging kits original kits19 [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,dataset,medical-imaging,kits,original,kits19' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,dataset,medical-imaging,kits,original,kits19"``` - -#### Run this script via Docker (beta) - -`cm docker script "get dataset medical-imaging kits original kits19[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_calibration` - - Environment variables: - - *CM_DATASET_CALIBRATION*: `yes` - - Workflow: - * `_default` - - Environment variables: - - *CM_GIT_PATCH*: `no` - - Workflow: - * `_full-history` - - Environment variables: - - *CM_GIT_DEPTH*: `` - - Workflow: - * `_no-recurse-submodules` - - Environment variables: - - *CM_GIT_RECURSE_SUBMODULES*: `` - - Workflow: - * `_patch` - - Environment variables: - - *CM_GIT_PATCH*: `yes` - - Workflow: - * `_short-history` - - Environment variables: - - *CM_GIT_DEPTH*: `--depth 5` - - Workflow: - * `_validation` - - Environment variables: - - *CM_DATASET_VALIDATION*: `yes` - - Workflow: - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_GIT_CHECKOUT: `master` -* CM_GIT_DEPTH: `--depth 2` -* CM_GIT_PATCH: `no` -* CM_GIT_RECURSE_SUBMODULES: `` -* CM_GIT_URL: `https://github.com/neheller/kits19` - -
    - -#### Versions -Default version: `master` - -* `custom` -* `master` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-kits19/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-kits19/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-kits19/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-kits19/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-kits19/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-kits19/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-kits19/_cm.json) - -___ -### Script output -`cmr "get dataset medical-imaging kits original kits19 [,variations]" -j` -#### New environment keys (filter) - -* `CM_DATASET_*` -#### New environment keys auto-detected from customize - -* `CM_DATASET_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-librispeech/index.md b/docs/AI-ML-datasets/get-dataset-librispeech/index.md deleted file mode 100644 index 170522f4c..000000000 --- a/docs/AI-ML-datasets/get-dataset-librispeech/index.md +++ /dev/null @@ -1,134 +0,0 @@ -Automatically generated README for this automation recipe: **get-dataset-librispeech** - -Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-librispeech,09f29df607e0415d) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-librispeech)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,dataset,speech,speech-recognition,librispeech,validation,audio,training,original* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get dataset speech speech-recognition librispeech validation audio training original" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,dataset,speech,speech-recognition,librispeech,validation,audio,training,original` - -`cm run script --tags=get,dataset,speech,speech-recognition,librispeech,validation,audio,training,original ` - -*or* - -`cmr "get dataset speech speech-recognition librispeech validation audio training original"` - -`cmr "get dataset speech speech-recognition librispeech validation audio training original " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,dataset,speech,speech-recognition,librispeech,validation,audio,training,original' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,dataset,speech,speech-recognition,librispeech,validation,audio,training,original"``` - -#### Run this script via Docker (beta) - -`cm docker script "get dataset speech speech-recognition librispeech validation audio training original" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `dev-clean` - -* `dev-clean` -* `dev-other` -* `test-clean` -* `test-other` -* `train-clean-100` -* `train-clean-360` -* `train-other-500` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-librispeech/_cm.json)*** - * get,sys-utils-cm - * CM names: `--adr.['sys-utils']...` - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-librispeech/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-librispeech/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-librispeech/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-librispeech/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-librispeech/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-librispeech/_cm.json) - -___ -### Script output -`cmr "get dataset speech speech-recognition librispeech validation audio training original " -j` -#### New environment keys (filter) - -* `CM_DATASET_*` -#### New environment keys auto-detected from customize - -* `CM_DATASET_LIBRISPEECH_PATH` -* `CM_DATASET_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-openimages-annotations/index.md b/docs/AI-ML-datasets/get-dataset-openimages-annotations/index.md deleted file mode 100644 index c7b470c4d..000000000 --- a/docs/AI-ML-datasets/get-dataset-openimages-annotations/index.md +++ /dev/null @@ -1,144 +0,0 @@ -Automatically generated README for this automation recipe: **get-dataset-openimages-annotations** - -Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-openimages-annotations,47e2158ed24c44e9) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-annotations)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,aux,dataset-aux,object-detection,openimages,annotations* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get aux dataset-aux object-detection openimages annotations" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,aux,dataset-aux,object-detection,openimages,annotations` - -`cm run script --tags=get,aux,dataset-aux,object-detection,openimages,annotations[,variations] ` - -*or* - -`cmr "get aux dataset-aux object-detection openimages annotations"` - -`cmr "get aux dataset-aux object-detection openimages annotations [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,aux,dataset-aux,object-detection,openimages,annotations' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,aux,dataset-aux,object-detection,openimages,annotations"``` - -#### Run this script via Docker (beta) - -`cm docker script "get aux dataset-aux object-detection openimages annotations[variations]" ` - -___ -### Customization - - -#### Variations - - * Group "**download-source**" -
    - Click here to expand this section. - - * **`_from.github`** (default) - - Environment variables: - - *CM_WGET_URL*: `https://github.com/mlcommons/inference/releases/download/v2.1/openimages-mlperf_annotations_2.1.json.zip` - - Workflow: - -
    - - -#### Default variations - -`_from.github` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-annotations/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-annotations/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-annotations/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-annotations/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-annotations/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-annotations/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-annotations/_cm.json) - -___ -### Script output -`cmr "get aux dataset-aux object-detection openimages annotations [,variations]" -j` -#### New environment keys (filter) - -* `CM_DATASET_ANNOTATIONS_*` -* `CM_DATASET_OPENIMAGES_ANNOTATIONS_*` -#### New environment keys auto-detected from customize - -* `CM_DATASET_ANNOTATIONS_DIR_PATH` -* `CM_DATASET_ANNOTATIONS_FILE_PATH` -* `CM_DATASET_OPENIMAGES_ANNOTATIONS_DIR_PATH` -* `CM_DATASET_OPENIMAGES_ANNOTATIONS_FILE_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-openimages-calibration/index.md b/docs/AI-ML-datasets/get-dataset-openimages-calibration/index.md deleted file mode 100644 index 969e9872d..000000000 --- a/docs/AI-ML-datasets/get-dataset-openimages-calibration/index.md +++ /dev/null @@ -1,178 +0,0 @@ -Automatically generated README for this automation recipe: 
**get-dataset-openimages-calibration** - -Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-openimages-calibration,27228976bb084dd0) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-calibration)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *get,dataset,openimages,calibration* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get dataset openimages calibration" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,dataset,openimages,calibration` - -`cm run script --tags=get,dataset,openimages,calibration[,variations] ` - -*or* - -`cmr "get dataset openimages calibration"` - -`cmr "get dataset openimages calibration [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this 
script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,dataset,openimages,calibration' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,dataset,openimages,calibration"``` - -#### Run this script via Docker (beta) - -`cm docker script "get dataset openimages calibration[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_filter` - - Environment variables: - - *CM_CALIBRATE_FILTER*: `yes` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,openimages,dataset,original,_calibration - - CM script: [get-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages) - -
    - - - * Group "**calibration-option**" -
    - Click here to expand this section. - - * **`_mlperf.option1`** (default) - - Environment variables: - - *CM_MLPERF_OPENIMAGES_CALIBRATION_OPTION*: `one` - - *CM_DOWNLOAD_CHECKSUM1*: `f09719174af3553119e2c621157773a6` - - Workflow: - -
    - - - * Group "**filter-size**" -
    - Click here to expand this section. - - * `_filter-size.#` - - Environment variables: - - *CM_CALIBRATION_FILTER_SIZE*: `#` - - Workflow: - * `_filter-size.400` - - Environment variables: - - *CM_CALIBRATION_FILTER_SIZE*: `400` - - Workflow: - -
    - - -#### Default variations - -`_mlperf.option1` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-calibration/_cm.yaml)*** - * download,file - * CM names: `--adr.['calibration-file-downloader']...` - - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-calibration/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-calibration/_cm.yaml) - 1. ***Run native script if exists*** - * [run-filter.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-calibration/run-filter.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-calibration/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-calibration/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages-calibration/_cm.yaml) - -___ -### Script output -`cmr "get dataset openimages calibration [,variations]" -j` -#### New environment keys (filter) - -* `CM_MLPERF_OPENIMAGES_CALIBRATION_LIST_FILE_WITH_PATH` -#### New environment keys auto-detected from customize - -* `CM_MLPERF_OPENIMAGES_CALIBRATION_LIST_FILE_WITH_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-openimages/index.md b/docs/AI-ML-datasets/get-dataset-openimages/index.md deleted file mode 100644 index a5d30a4b0..000000000 --- a/docs/AI-ML-datasets/get-dataset-openimages/index.md +++ /dev/null @@ -1,250 +0,0 @@ -Automatically generated README for this automation recipe: **get-dataset-openimages** - -Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-openimages,0a9d49b644cf4142) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,dataset,openimages,open-images,object-detection,original* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get dataset openimages open-images object-detection original" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,dataset,openimages,open-images,object-detection,original` - -`cm run script --tags=get,dataset,openimages,open-images,object-detection,original[,variations] ` - -*or* - -`cmr "get dataset openimages open-images object-detection original"` - -`cmr "get dataset openimages open-images object-detection original [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,dataset,openimages,open-images,object-detection,original' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,dataset,openimages,open-images,object-detection,original"``` - -#### Run this script via Docker (beta) - -`cm docker script "get dataset openimages open-images object-detection original[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_filter` - - Workflow: - * `_filter,calibration` - - Workflow: - * `_filter-size.#` - - Workflow: - * `_using-fiftyone` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_fiftyone - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,openssl,lib - - CM script: [get-openssl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-openssl) - -
    - - - * Group "**annotations**" -
    - Click here to expand this section. - - * `_custom-annotations` - - Environment variables: - - *CM_DATASET_OPENIMAGES_CUSTOM_ANNOTATIONS*: `yes` - - Workflow: - * **`_default-annotations`** (default) - - Environment variables: - - *CM_DATASET_OPENIMAGES_CUSTOM_ANNOTATIONS*: `no` - - Workflow: - -
    - - - * Group "**dataset-type**" -
    - Click here to expand this section. - - * `_calibration` - - Environment variables: - - *CM_DATASET_CALIBRATION*: `yes` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,openimages,calibration - * CM names: `--adr.['openimages-calibration']...` - - CM script: [get-dataset-openimages-calibration](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages-calibration) - * **`_validation`** (default) - - Environment variables: - - *CM_DATASET_CALIBRATION*: `no` - - Workflow: - -
    - - - * Group "**size**" -
    - Click here to expand this section. - - * **`_50`** (default) - - Environment variables: - - *CM_DATASET_SIZE*: `50` - - Workflow: - * `_500` - - Environment variables: - - *CM_DATASET_SIZE*: `500` - - Workflow: - * `_full` - - Environment variables: - - *CM_DATASET_SIZE*: `` - - Workflow: - * `_size.#` - - Environment variables: - - *CM_DATASET_SIZE*: `#` - - Workflow: - -
    - - -#### Default variations - -`_50,_default-annotations,_validation` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_DATASET_CALIBRATION: `no` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages/_cm.json)*** - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,generic-python-lib,_requests - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * mlperf,inference,source - * CM names: `--adr.['inference-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * get,generic-python-lib,_boto3 - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_tqdm - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_numpy - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_opencv-python - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_pandas - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_pycocotools - * CM names: `--adr.['pycocotools']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages/customize.py)*** - 1. 
Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages/run.sh) - 1. ***Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages/_cm.json)*** - * get,openimages,annotations - * Enable this dependency only if all ENV vars are set:
    -`{'CM_DATASET_OPENIMAGES_CUSTOM_ANNOTATIONS': ['yes']}` - - CM script: [get-dataset-openimages-annotations](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages-annotations) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openimages/_cm.json) - -___ -### Script output -`cmr "get dataset openimages open-images object-detection original [,variations]" -j` -#### New environment keys (filter) - -* `CM_CALIBRATION_DATASET_PATH` -* `CM_DATASET_ANNOTATIONS_DIR_PATH` -* `CM_DATASET_ANNOTATIONS_FILE_PATH` -* `CM_DATASET_CALIBRATION_ANNOTATIONS_FILE_PATH` -* `CM_DATASET_PATH` -* `CM_DATASET_PATH_ROOT` -* `CM_DATASET_VALIDATION_ANNOTATIONS_FILE_PATH` -#### New environment keys auto-detected from customize - -* `CM_CALIBRATION_DATASET_PATH` -* `CM_DATASET_ANNOTATIONS_DIR_PATH` -* `CM_DATASET_ANNOTATIONS_FILE_PATH` -* `CM_DATASET_CALIBRATION_ANNOTATIONS_FILE_PATH` -* `CM_DATASET_PATH` -* `CM_DATASET_PATH_ROOT` -* `CM_DATASET_VALIDATION_ANNOTATIONS_FILE_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-openorca/index.md b/docs/AI-ML-datasets/get-dataset-openorca/index.md deleted file mode 100644 index 982a9c9c6..000000000 --- a/docs/AI-ML-datasets/get-dataset-openorca/index.md +++ /dev/null @@ -1,173 +0,0 @@ -Automatically generated README for this automation recipe: **get-dataset-openorca** - -Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-openorca,9252c4d90d5940b7) ]* - ---- -#### Summary - -* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openorca)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,dataset,openorca,language-processing,original* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get dataset openorca language-processing original" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,dataset,openorca,language-processing,original` - -`cm run script --tags=get,dataset,openorca,language-processing,original[,variations] ` - -*or* - -`cmr "get dataset openorca language-processing original"` - -`cmr "get dataset openorca language-processing original [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'get,dataset,openorca,language-processing,original', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,dataset,openorca,language-processing,original"``` - -#### Run this script via Docker (beta) - -`cm docker script "get dataset openorca language-processing original[variations]" ` - -___ -### Customization - - -#### Variations - - * Group "**dataset-type**" -
    - Click here to expand this section. - - * `_calibration` - - Environment variables: - - *CM_DATASET_CALIBRATION*: `yes` - - Workflow: - * **`_validation`** (default) - - Environment variables: - - *CM_DATASET_CALIBRATION*: `no` - - Workflow: - -
    - - - * Group "**size**" -
    - Click here to expand this section. - - * `_500` - - Environment variables: - - *CM_DATASET_SIZE*: `500` - - Workflow: - * **`_60`** (default) - - Environment variables: - - *CM_DATASET_SIZE*: `60` - - Workflow: - * `_full` - - Environment variables: - - *CM_DATASET_SIZE*: `24576` - - Workflow: - * `_size.#` - - Environment variables: - - *CM_DATASET_SIZE*: `#` - - Workflow: - -
    - - -#### Default variations - -`_60,_validation` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_DATASET_CALIBRATION: `no` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openorca/_cm.json)*** - * get,git,repo,_lfs,_repo.https://huggingface.co/datasets/Open-Orca/OpenOrca - * CM names: `--adr.['openorca-src']...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openorca/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openorca/_cm.json) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openorca/_cm.json) - 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openorca/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-openorca/_cm.json) - -___ -### Script output -`cmr "get dataset openorca language-processing original [,variations]" -j` -#### New environment keys (filter) - -* `CM_DATASET_*` -#### New environment keys auto-detected from customize - -* `CM_DATASET_OPENORCA_PARQUET` -* `CM_DATASET_PATH` -* `CM_DATASET_PATH_ROOT` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-squad-vocab/index.md b/docs/AI-ML-datasets/get-dataset-squad-vocab/index.md deleted file mode 100644 index 1152f2292..000000000 --- a/docs/AI-ML-datasets/get-dataset-squad-vocab/index.md +++ /dev/null @@ -1,142 +0,0 @@ -Automatically generated README for this automation recipe: **get-dataset-squad-vocab** - -Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-squad-vocab,e38874fff5094577) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad-vocab)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,aux,dataset-aux,language-processing,squad-aux,vocab,squad-vocab* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get aux dataset-aux language-processing squad-aux vocab squad-vocab" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,aux,dataset-aux,language-processing,squad-aux,vocab,squad-vocab` - -`cm run script --tags=get,aux,dataset-aux,language-processing,squad-aux,vocab,squad-vocab[,variations] ` - -*or* - -`cmr "get aux dataset-aux language-processing squad-aux vocab squad-vocab"` - -`cmr "get aux dataset-aux language-processing squad-aux vocab squad-vocab [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'get,aux,dataset-aux,language-processing,squad-aux,vocab,squad-vocab', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,aux,dataset-aux,language-processing,squad-aux,vocab,squad-vocab"``` - -#### Run this script via Docker (beta) - -`cm docker script "get aux dataset-aux language-processing squad-aux vocab squad-vocab[variations]" ` - -___ -### Customization - - -#### Variations - - * Group "**download-source**" -
    - Click here to expand this section. - - * **`_from.zenodo`** (default) - - Environment variables: - - *CM_WGET_URL*: `https://zenodo.org/record/3733868/files/vocab.txt` - - Workflow: - -
    - - -#### Default variations - -`_from.zenodo` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad-vocab/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad-vocab/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad-vocab/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad-vocab/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad-vocab/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad-vocab/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad-vocab/_cm.json) - -___ -### Script output -`cmr "get aux dataset-aux language-processing squad-aux vocab squad-vocab [,variations]" -j` -#### New environment keys (filter) - -* `CM_DATASET_SQUAD_VOCAB_PATH` -* `CM_ML_MODEL_BERT_VOCAB_FILE_WITH_PATH` -#### New environment keys auto-detected from customize - -* `CM_DATASET_SQUAD_VOCAB_PATH` -* `CM_ML_MODEL_BERT_VOCAB_FILE_WITH_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-dataset-squad/index.md b/docs/AI-ML-datasets/get-dataset-squad/index.md deleted file mode 100644 index a7f1a5595..000000000 --- a/docs/AI-ML-datasets/get-dataset-squad/index.md +++ /dev/null @@ -1,129 +0,0 @@ -Automatically generated README for this automation recipe: **get-dataset-squad** - -Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ 
[Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dataset-squad,6651c119c3ae49b3) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,dataset,squad,language-processing,validation,original* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get dataset squad language-processing validation original" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,dataset,squad,language-processing,validation,original` - -`cm run script --tags=get,dataset,squad,language-processing,validation,original ` - -*or* - -`cmr "get dataset squad language-processing validation original"` - -`cmr "get dataset squad language-processing validation original " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'get,dataset,squad,language-processing,validation,original', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,dataset,squad,language-processing,validation,original"``` - -#### Run this script via Docker (beta) - -`cm docker script "get dataset squad language-processing validation original" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `1.1` - -* `1.1` -* `2.0` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad/_cm.json)*** - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad/_cm.json) - 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dataset-squad/_cm.json) - -___ -### Script output -`cmr "get dataset squad language-processing validation original " -j` -#### New environment keys (filter) - -* `CM_DATASET_*` -#### New environment keys auto-detected from customize - -* `CM_DATASET_PATH` -* `CM_DATASET_SQUAD_PATH` -* `CM_DATASET_SQUAD_VAL_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-criteo/index.md b/docs/AI-ML-datasets/get-preprocessed-dataset-criteo/index.md deleted file mode 100644 index fec163969..000000000 --- a/docs/AI-ML-datasets/get-preprocessed-dataset-criteo/index.md +++ /dev/null @@ -1,226 +0,0 @@ -Automatically generated README for this automation recipe: **get-preprocessed-dataset-criteo** - -Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-preprocessed-dataset-criteo,afa59956272a4ba4) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-criteo)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,dataset,criteo,recommendation,dlrm,preprocessed* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get dataset criteo recommendation dlrm preprocessed" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,dataset,criteo,recommendation,dlrm,preprocessed` - -`cm run script --tags=get,dataset,criteo,recommendation,dlrm,preprocessed[,variations] [--input_flags]` - -*or* - -`cmr "get dataset criteo recommendation dlrm preprocessed"` - -`cmr "get dataset criteo recommendation dlrm preprocessed [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'get,dataset,criteo,recommendation,dlrm,preprocessed', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,dataset,criteo,recommendation,dlrm,preprocessed"``` - -#### Run this script via Docker (beta) - -`cm docker script "get dataset criteo recommendation dlrm preprocessed[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_1` - - Environment variables: - - *CM_DATASET_SIZE*: `1` - - Workflow: - * `_50` - - Environment variables: - - *CM_DATASET_SIZE*: `50` - - Workflow: - * `_fake` - - Environment variables: - - *CM_CRITEO_FAKE*: `yes` - - Workflow: - * `_full` - - Workflow: - * `_validation` - - Workflow: - -
    - - - * Group "**type**" -
    - Click here to expand this section. - - * **`_multihot`** (default) - - Environment variables: - - *CM_DATASET_CRITEO_MULTIHOT*: `yes` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,mlperf,training,src - * CM names: `--adr.['mlperf-training', 'training-src']...` - - CM script: [get-mlperf-training-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-src) - * get,generic-python-lib,_package.typing_inspect - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.iopath - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.fbgemm_gpu - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.torchrec - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.pyre_extensions - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - -
    - - -#### Default variations - -`_multihot` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--dir=value` → `CM_DATASET_PREPROCESSED_PATH=value` -* `--output_dir=value` → `CM_DATASET_PREPROCESSED_OUTPUT_PATH=value` -* `--threads=value` → `CM_NUM_PREPROCESS_THREADS=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "dir":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-criteo/_cm.json)*** - * get,python3 - * CM names: `--adr.['python3', 'python']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,dataset,criteo,original - * Skip this dependency only if all ENV vars are set:
    -`{'CM_DATASET_PREPROCESSED_PATH': ['on']}` - * CM names: `--adr.['original-dataset', 'criteo-dataset']...` - - CM script: [get-dataset-criteo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-criteo) - * get,dlrm,src - * CM names: `--adr.['dlrm-src']...` - - CM script: [get-dlrm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dlrm) - * mlperf,mlcommons,inference,source,src - * CM names: `--adr.['inference-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * get,generic-python-lib,_scikit-learn - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torch - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_opencv-python - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_decorator - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_psutil - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_onnx - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_tqdm - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_mlperf_logging - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. 
***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-criteo/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-criteo/_cm.json) - 1. ***Run native script if exists*** - * [run-multihot.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-criteo/run-multihot.sh) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-criteo/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-criteo/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-criteo/_cm.json) - -___ -### Script output -`cmr "get dataset criteo recommendation dlrm preprocessed [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_DATASET_*` -#### New environment keys auto-detected from customize - -* `CM_DATASET_PREPROCESSED_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-generic/index.md b/docs/AI-ML-datasets/get-preprocessed-dataset-generic/index.md deleted file mode 100644 index f6ecaad04..000000000 --- a/docs/AI-ML-datasets/get-preprocessed-dataset-generic/index.md +++ /dev/null @@ -1,117 +0,0 @@ -Automatically generated README for this automation recipe: **get-preprocesser-script-generic** - -Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-preprocesser-script-generic,d5e603627e2046eb) ]* - ---- -#### 
Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocesser-script-generic)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,preprocessor,generic,image-preprocessor,script* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get preprocessor generic image-preprocessor script" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,preprocessor,generic,image-preprocessor,script` - -`cm run script --tags=get,preprocessor,generic,image-preprocessor,script ` - -*or* - -`cmr "get preprocessor generic image-preprocessor script"` - -`cmr "get preprocessor generic image-preprocessor script " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'get,preprocessor,generic,image-preprocessor,script', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,preprocessor,generic,image-preprocessor,script"``` - -#### Run this script via Docker (beta) - -`cm docker script "get preprocessor generic image-preprocessor script" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocesser-script-generic/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocesser-script-generic/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocesser-script-generic/_cm.json) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocesser-script-generic/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocesser-script-generic/_cm.json) - -___ -### Script output -`cmr "get preprocessor generic image-preprocessor script " -j` -#### New environment keys (filter) - -* `+PYTHONPATH` -#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-imagenet/index.md b/docs/AI-ML-datasets/get-preprocessed-dataset-imagenet/index.md deleted file mode 100644 index 6c557299a..000000000 --- a/docs/AI-ML-datasets/get-preprocessed-dataset-imagenet/index.md +++ /dev/null @@ -1,456 +0,0 @@ -Automatically generated README for this automation recipe: **get-preprocessed-dataset-imagenet** - -Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-preprocessed-dataset-imagenet,f259d490bbaf45f5) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,dataset,imagenet,ILSVRC,image-classification,preprocessed* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get dataset imagenet ILSVRC image-classification preprocessed" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,dataset,imagenet,ILSVRC,image-classification,preprocessed` - -`cm run script --tags=get,dataset,imagenet,ILSVRC,image-classification,preprocessed[,variations] [--input_flags]` - -*or* - -`cmr "get dataset imagenet ILSVRC image-classification preprocessed"` - -`cmr "get dataset imagenet ILSVRC image-classification preprocessed [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'get,dataset,imagenet,ILSVRC,image-classification,preprocessed', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,dataset,imagenet,ILSVRC,image-classification,preprocessed"``` - -#### Run this script via Docker (beta) - -`cm docker script "get dataset imagenet ILSVRC image-classification preprocessed[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *Internal group (variations should not be selected manually)* -
    - Click here to expand this section. - - * `_mobilenet_` - - Environment variables: - - *CM_MODEL*: `mobilenet` - - Workflow: - * `_resnet50_` - - Environment variables: - - *CM_MODEL*: `resnet50` - - Workflow: - -
    - - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_500,validation` - - Workflow: - * `_default` - - Workflow: - * `_for.mobilenet,float32` - - Environment variables: - - *CM_DATASET_QUANTIZE*: `0` - - *CM_DATASET_GIVEN_CHANNEL_MEANS*: `` - - *CM_DATASET_NORMALIZE_DATA*: `1` - - *CM_DATASET_SUBTRACT_MEANS*: `0` - - Workflow: - * `_for.mobilenet,rgb8` - - Environment variables: - - *CM_DATASET_GIVEN_CHANNEL_MEANS*: `` - - *CM_DATASET_SUBTRACT_MEANS*: `0` - - *CM_DATASET_QUANTIZE*: `0` - - *CM_DATASET_NORMALIZE_DATA*: `0` - - *CM_DATASET_DATA_TYPE*: `uint8` - - Workflow: - * `_for.resnet50,float32` - - Workflow: - * `_for.resnet50,rgb8` - - Environment variables: - - *CM_DATASET_GIVEN_CHANNEL_MEANS*: `` - - *CM_DATASET_SUBTRACT_MEANS*: `0` - - *CM_DATASET_NORMALIZE_DATA*: `0` - - *CM_DATASET_QUANTIZE*: `0` - - *CM_DATASET_DATA_TYPE*: `uint8` - - Workflow: - * `_for.resnet50,rgb8,uint8` - - Environment variables: - - *CM_DATASET_GIVEN_CHANNEL_MEANS*: `123.68 116.78 103.94` - - *CM_DATASET_SUBTRACT_MEANS*: `1` - - *CM_DATASET_QUANTIZE*: `1` - - Workflow: - * `_for.resnet50,uint8` - - Environment variables: - - *CM_DATASET_QUANT_SCALE*: `1.18944883` - - *CM_DATASET_QUANT_OFFSET*: `0` - - Workflow: - * `_pytorch` - - Environment variables: - - *CM_PREPROCESS_PYTORCH*: `yes` - - *CM_MODEL*: `resnet50` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_torchvision - * CM names: `--adr.['torchvision']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_tflite_tpu` - - Environment variables: - - *CM_MODEL*: `resnet50` - - *CM_PREPROCESS_TFLITE_TPU*: `yes` - - Workflow: - -
    - - - * Group "**calibration-option**" -
    - Click here to expand this section. - - * `_mlperf.option1` - - Environment variables: - - *CM_DATASET_CALIBRATION_OPTION*: `one` - - Workflow: - * `_mlperf.option2` - - Environment variables: - - *CM_DATASET_CALIBRATION_OPTION*: `two` - - Workflow: - -
    - - - * Group "**dataset-type**" -
    - Click here to expand this section. - - * `_calibration` - - Environment variables: - - *CM_DATASET_TYPE*: `calibration` - - Workflow: - * **`_validation`** (default) - - Environment variables: - - *CM_DATASET_TYPE*: `validation` - - Workflow: - -
    - - - * Group "**extension**" -
    - Click here to expand this section. - - * `_rgb32` - - Environment variables: - - *CM_DATASET_PREPROCESSED_EXTENSION*: `rgb32` - - Workflow: - * `_rgb8` - - Environment variables: - - *CM_DATASET_PREPROCESSED_EXTENSION*: `rgb8` - - Workflow: - -
    - - - * Group "**interpolation-method**" -
    - Click here to expand this section. - - * `_inter.area` - - Environment variables: - - *CM_DATASET_INTERPOLATION_METHOD*: `INTER_AREA` - - Workflow: - * `_inter.linear` - - Environment variables: - - *CM_DATASET_INTERPOLATION_METHOD*: `INTER_LINEAR` - - Workflow: - -
    - - - * Group "**layout**" -
    - Click here to expand this section. - - * **`_NCHW`** (default) - - Environment variables: - - *CM_DATASET_DATA_LAYOUT*: `NCHW` - - Workflow: - * `_NHWC` - - Environment variables: - - *CM_DATASET_DATA_LAYOUT*: `NHWC` - - Workflow: - -
    - - - * Group "**model**" -
    - Click here to expand this section. - - * `_for.mobilenet` - - Workflow: - * `_for.resnet50` - - Environment variables: - - *CM_DATASET_SUBTRACT_MEANS*: `1` - - *CM_DATASET_GIVEN_CHANNEL_MEANS*: `123.68 116.78 103.94` - - *CM_DATASET_NORMALIZE_DATA*: `0` - - *CM_DATASET_INTERPOLATION_METHOD*: `INTER_AREA` - - Workflow: - -
    - - - * Group "**precision**" -
    - Click here to expand this section. - - * `_float32` - - Environment variables: - - *CM_DATASET_DATA_TYPE*: `float32` - - *CM_DATASET_QUANTIZE*: `0` - - *CM_DATASET_CONVERT_TO_UNSIGNED*: `0` - - Workflow: - * `_int8` - - Environment variables: - - *CM_DATASET_DATA_TYPE*: `int8` - - *CM_DATASET_QUANTIZE*: `1` - - *CM_DATASET_CONVERT_TO_UNSIGNED*: `0` - - Workflow: - * `_uint8` - - Environment variables: - - *CM_DATASET_DATA_TYPE*: `uint8` - - *CM_DATASET_DATA_TYPE_INPUT*: `float32` - - *CM_DATASET_QUANTIZE*: `1` - - *CM_DATASET_CONVERT_TO_UNSIGNED*: `1` - - Workflow: - -
    - - - * Group "**preprocessing-source**" -
    - Click here to expand this section. - - * `_generic-preprocessor` - - Environment variables: - - *CM_DATASET_REFERENCE_PREPROCESSOR*: `0` - - Workflow: - 1. ***Read "prehook_deps" on other CM scripts*** - * get,generic,image-preprocessor - - CM script: [get-preprocesser-script-generic](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocesser-script-generic) - * **`_mlcommons-reference-preprocessor`** (default) - - Environment variables: - - *CM_DATASET_REFERENCE_PREPROCESSOR*: `1` - - Workflow: - -
    - - - * Group "**resolution**" -
    - Click here to expand this section. - - * `_resolution.#` - - Environment variables: - - *CM_DATASET_INPUT_SQUARE_SIDE*: `#` - - Workflow: - * **`_resolution.224`** (default) - - Environment variables: - - *CM_DATASET_INPUT_SQUARE_SIDE*: `224` - - Workflow: - -
    - - - * Group "**size**" -
    - Click here to expand this section. - - * `_1` - - Environment variables: - - *CM_DATASET_SIZE*: `1` - - Workflow: - * `_500` - - Environment variables: - - *CM_DATASET_SIZE*: `500` - - Workflow: - * `_full` - - Environment variables: - - *CM_DATASET_SIZE*: `50000` - - Workflow: - * `_size.#` - - Environment variables: - - *CM_DATASET_SIZE*: `#` - - Workflow: - -
    - - -#### Default variations - -`_NCHW,_mlcommons-reference-preprocessor,_resolution.224,_validation` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--dir=value` → `CM_DATASET_PREPROCESSED_PATH=value` -* `--imagenet_path=value` → `CM_IMAGENET_PATH=value` -* `--imagenet_preprocessed_path=value` → `CM_IMAGENET_PREPROCESSED_PATH=value` -* `--threads=value` → `CM_NUM_PREPROCESS_THREADS=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "dir":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_DATASET_CROP_FACTOR: `87.5` -* CM_DATASET_DATA_TYPE: `float32` -* CM_DATASET_DATA_LAYOUT: `NCHW` -* CM_DATASET_QUANT_SCALE: `1` -* CM_DATASET_QUANTIZE: `0` -* CM_DATASET_QUANT_OFFSET: `0` -* CM_DATASET_PREPROCESSED_EXTENSION: `npy` -* CM_DATASET_CONVERT_TO_UNSIGNED: `0` -* CM_DATASET_REFERENCE_PREPROCESSOR: `1` -* CM_PREPROCESS_VGG: `yes` -* CM_MODEL: `resnet50` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet/_cm.json)*** - * get,python3 - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_IMAGENET_PREPROCESSED_PATH': ['on']}` - * CM names: `--adr.['python3', 'python']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,dataset,image-classification,original - * Skip this dependency only if all ENV vars are set:
    -`{'CM_IMAGENET_PREPROCESSED_PATH': ['on']}` - * CM names: `--adr.['original-dataset']...` - - CM script: [get-dataset-imagenet-val](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-val) - * get,dataset-aux,image-classification,imagenet-aux - * Enable this dependency only if all ENV vars are set:
    -`{'CM_DATASET_TYPE': ['validation']}` - * Skip this dependency only if all ENV vars are set:
    -`{'CM_IMAGENET_PREPROCESSED_PATH': ['on']}` - - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) - * get,dataset,imagenet,calibration - * Enable this dependency only if all ENV vars are set:
    -`{'CM_DATASET_TYPE': ['calibration']}` - - CM script: [get-dataset-imagenet-calibration](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-calibration) - * get,generic-python-lib,_package.opencv-python-headless - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_pillow - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * mlperf,mlcommons,inference,source,src - * Enable this dependency only if all ENV vars are set:
    -`{'CM_DATASET_REFERENCE_PREPROCESSOR': ['1']}` - * Skip this dependency only if all ENV vars are set:
    -`{'CM_IMAGENET_PREPROCESSED_PATH': ['on']}` - * CM names: `--adr.['inference-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet/_cm.json) - 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-imagenet/_cm.json) - -___ -### Script output -`cmr "get dataset imagenet ILSVRC image-classification preprocessed [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_DATASET_*` -#### New environment keys auto-detected from customize - -* `CM_DATASET_DATA_TYPE_INPUT` -* `CM_DATASET_IMAGES_LIST` -* `CM_DATASET_PREPROCESSED_IMAGENAMES_LIST` -* `CM_DATASET_PREPROCESSED_IMAGES_LIST` -* `CM_DATASET_PREPROCESSED_PATH` -* `CM_DATASET_SIZE` -* `CM_DATASET_TYPE` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-kits19/index.md b/docs/AI-ML-datasets/get-preprocessed-dataset-kits19/index.md deleted file mode 100644 index 35e4a05b2..000000000 --- a/docs/AI-ML-datasets/get-preprocessed-dataset-kits19/index.md +++ /dev/null @@ -1,232 +0,0 @@ -Automatically generated README for this automation recipe: **get-preprocessed-dataset-kits19** - -Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-preprocessed-dataset-kits19,2094d9b9ab6c4c9e) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-kits19)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,dataset,medical-imaging,kits19,preprocessed* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get dataset medical-imaging kits19 preprocessed" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,dataset,medical-imaging,kits19,preprocessed` - -`cm run script --tags=get,dataset,medical-imaging,kits19,preprocessed[,variations] [--input_flags]` - -*or* - -`cmr "get dataset medical-imaging kits19 preprocessed"` - -`cmr "get dataset medical-imaging kits19 preprocessed [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'get,dataset,medical-imaging,kits19,preprocessed', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,dataset,medical-imaging,kits19,preprocessed"``` - -#### Run this script via Docker (beta) - -`cm docker script "get dataset medical-imaging kits19 preprocessed[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_nvidia` - - Environment variables: - - *CM_PREPROCESSING_BY_NVIDIA*: `yes` - - Workflow: - -
    - - - * Group "**dataset-count**" -
    - Click here to expand this section. - - * `_1` - - Environment variables: - - *CM_DATASET_SIZE*: `1` - - Workflow: - * `_5` - - Environment variables: - - *CM_DATASET_SIZE*: `5` - - Workflow: - * `_50` - - Environment variables: - - *CM_DATASET_SIZE*: `50` - - Workflow: - * `_500` - - Environment variables: - - *CM_DATASET_SIZE*: `500` - - Workflow: - * `_full` - - Environment variables: - - *CM_DATASET_SIZE*: `` - - Workflow: - -
    - - - * Group "**dataset-precision**" -
    - Click here to expand this section. - - * **`_fp32`** (default) - - Environment variables: - - *CM_DATASET_DTYPE*: `fp32` - - Workflow: - * `_int8` - - Environment variables: - - *CM_DATASET_DTYPE*: `int8` - - Workflow: - -
    - - - * Group "**dataset-type**" -
    - Click here to expand this section. - - * `_calibration` - - Environment variables: - - *CM_DATASET_PATH*: `<<>>` - - Workflow: - * **`_validation`** (default) - - Workflow: - -
    - - -#### Default variations - -`_fp32,_validation` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--dir=value` → `CM_DATASET_PREPROCESSED_PATH=value` -* `--threads=value` → `CM_NUM_PREPROCESS_THREADS=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "dir":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_DATASET: `kits19` -* CM_DATASET_DTYPE: `fp32` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-kits19/_cm.json)*** - * get,python3 - * CM names: `--adr.['python3', 'python']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,dataset,medical-imaging,kits19,original - * CM names: `--adr.['original-dataset']...` - - CM script: [get-dataset-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-kits19) - * mlperf,mlcommons,inference,source,src - * CM names: `--adr.['inference-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * get,generic-python-lib,_scipy - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_nibabel - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_numpy - * CM names: `--adr.['numpy']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-kits19/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-kits19/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-kits19/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-kits19/_cm.json) - 1. 
***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-kits19/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-kits19/_cm.json) - -___ -### Script output -`cmr "get dataset medical-imaging kits19 preprocessed [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_DATASET_*` -#### New environment keys auto-detected from customize - -* `CM_DATASET_PREPROCESSED_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-librispeech/index.md b/docs/AI-ML-datasets/get-preprocessed-dataset-librispeech/index.md deleted file mode 100644 index 875bcf494..000000000 --- a/docs/AI-ML-datasets/get-preprocessed-dataset-librispeech/index.md +++ /dev/null @@ -1,222 +0,0 @@ -Automatically generated README for this automation recipe: **get-preprocessed-dataset-librispeech** - -Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-preprocessed-dataset-librispeech,e9f62fc969d5483a) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-librispeech)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,dataset,speech-recognition,librispeech,preprocessed* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get dataset speech-recognition librispeech preprocessed" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,dataset,speech-recognition,librispeech,preprocessed` - -`cm run script --tags=get,dataset,speech-recognition,librispeech,preprocessed[,variations] [--input_flags]` - -*or* - -`cmr "get dataset speech-recognition librispeech preprocessed"` - -`cmr "get dataset speech-recognition librispeech preprocessed [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'get,dataset,speech-recognition,librispeech,preprocessed', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,dataset,speech-recognition,librispeech,preprocessed"``` - -#### Run this script via Docker (beta) - -`cm docker script "get dataset speech-recognition librispeech preprocessed[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * Group "**dataset-count**" -
    - Click here to expand this section. - - * `_1` - - Environment variables: - - *CM_DATASET_SIZE*: `1` - - Workflow: - * `_5` - - Environment variables: - - *CM_DATASET_SIZE*: `5` - - Workflow: - * `_50` - - Environment variables: - - *CM_DATASET_SIZE*: `50` - - Workflow: - * `_500` - - Environment variables: - - *CM_DATASET_SIZE*: `500` - - Workflow: - * `_full` - - Environment variables: - - *CM_DATASET_SIZE*: `` - - Workflow: - -
    - - - * Group "**dataset-precision**" -
    - Click here to expand this section. - - * **`_fp32`** (default) - - Environment variables: - - *CM_DATASET_DTYPE*: `fp32` - - Workflow: - * `_int8` - - Environment variables: - - *CM_DATASET_DTYPE*: `int8` - - Workflow: - -
    - - - * Group "**dataset-type**" -
    - Click here to expand this section. - - * `_calibration` - - Environment variables: - - *CM_DATASET_PATH*: `<<>>` - - Workflow: - * **`_validation`** (default) - - Workflow: - -
    - - -#### Default variations - -`_fp32,_validation` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--dir=value` → `CM_DATASET_PREPROCESSED_PATH=value` -* `--threads=value` → `CM_NUM_PREPROCESS_THREADS=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "dir":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_DATASET: `kits19` -* CM_DATASET_DTYPE: `fp32` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-librispeech/_cm.json)*** - * get,python3 - * CM names: `--adr.['python3', 'python']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,dataset,speech-recognition,librispeech,original - * CM names: `--adr.['original-dataset']...` - - CM script: [get-dataset-librispeech](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-librispeech) - * mlperf,mlcommons,inference,source,src - * CM names: `--adr.['inference-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * get,generic-python-lib,_sox - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_pandas - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_tqdm - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,sys-util,generic,_sox - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-librispeech/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-librispeech/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-librispeech/run.sh) - 1. 
Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-librispeech/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-librispeech/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-librispeech/_cm.json) - -___ -### Script output -`cmr "get dataset speech-recognition librispeech preprocessed [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_DATASET_*` -#### New environment keys auto-detected from customize - -* `CM_DATASET_PREPROCESSED_JSON` -* `CM_DATASET_PREPROCESSED_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-openimages/index.md b/docs/AI-ML-datasets/get-preprocessed-dataset-openimages/index.md deleted file mode 100644 index 84ee7e534..000000000 --- a/docs/AI-ML-datasets/get-preprocessed-dataset-openimages/index.md +++ /dev/null @@ -1,401 +0,0 @@ -Automatically generated README for this automation recipe: **get-preprocessed-dataset-openimages** - -Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-preprocessed-dataset-openimages,9842f1be8cba4c7b) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and 
reuse this script (see in above meta description): *get,dataset,openimages,open-images,object-detection,preprocessed* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get dataset openimages open-images object-detection preprocessed" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,dataset,openimages,open-images,object-detection,preprocessed` - -`cm run script --tags=get,dataset,openimages,open-images,object-detection,preprocessed[,variations] [--input_flags]` - -*or* - -`cmr "get dataset openimages open-images object-detection preprocessed"` - -`cmr "get dataset openimages open-images object-detection preprocessed [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'get,dataset,openimages,open-images,object-detection,preprocessed', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,dataset,openimages,open-images,object-detection,preprocessed"``` - -#### Run this script via Docker (beta) - -`cm docker script "get dataset openimages open-images object-detection preprocessed[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_filter` - - Workflow: - * `_filter,calibration` - - Environment variables: - - *CM_DATASET_CALIBRATION_FILTER*: `yes` - - Workflow: - * `_for.retinanet.onnx` - - Environment variables: - - *CM_ML_MODEL_NAME*: `retinanet` - - *CM_DATASET_SUBTRACT_MEANS*: `1` - - *CM_DATASET_GIVEN_CHANNEL_MEANS*: `0.485 0.456 0.406` - - *CM_DATASET_GIVEN_CHANNEL_STDS*: `0.229 0.224 0.225` - - *CM_DATASET_NORMALIZE_DATA*: `0` - - *CM_DATASET_NORMALIZE_LOWER*: `0.0` - - *CM_DATASET_NORMALIZE_UPPER*: `1.0` - - *CM_DATASET_CONVERT_TO_BGR*: `0` - - *CM_DATASET_CROP_FACTOR*: `100.0` - - Workflow: - * `_for.retinanet.onnx,fp32` - - Workflow: - * `_for.retinanet.onnx,uint8` - - Environment variables: - - *CM_DATASET_QUANT_SCALE*: `0.0186584499` - - *CM_DATASET_QUANT_OFFSET*: `114` - - Workflow: - * `_full,validation` - - Environment variables: - - *CM_DATASET_SIZE*: `24781` - - Workflow: - * `_nvidia` - - Environment variables: - - *CM_PREPROCESSING_BY_NVIDIA*: `yes` - - Workflow: - * `_quant-offset.#` - - Workflow: - * `_quant-scale.#` - - Workflow: - -
    - - - * Group "**annotations**" -
    - Click here to expand this section. - - * `_custom-annotations` - - Workflow: - * **`_default-annotations`** (default) - - Workflow: - -
    - - - * Group "**dataset-count**" -
    - Click here to expand this section. - - * **`_50`** (default) - - Environment variables: - - *CM_DATASET_SIZE*: `50` - - Workflow: - * `_500` - - Environment variables: - - *CM_DATASET_SIZE*: `500` - - Workflow: - * `_full` - - Workflow: - * `_size.#` - - Environment variables: - - *CM_DATASET_SIZE*: `#` - - Workflow: - -
    - - - * Group "**dataset-layout**" -
    - Click here to expand this section. - - * **`_NCHW`** (default) - - Environment variables: - - *CM_DATASET_DATA_LAYOUT*: `NCHW` - - Workflow: - * `_NHWC` - - Environment variables: - - *CM_DATASET_DATA_LAYOUT*: `NHWC` - - Workflow: - -
    - - - * Group "**dataset-precision**" -
    - Click here to expand this section. - - * **`_fp32`** (default) - - Environment variables: - - *CM_DATASET_DTYPE*: `fp32` - - *CM_DATASET_INPUT_DTYPE*: `fp32` - - *CM_DATASET_QUANTIZE*: `0` - - *CM_DATASET_CONVERT_TO_UNSIGNED*: `0` - - Workflow: - * `_int8` - - Environment variables: - - *CM_DATASET_DTYPE*: `int8` - - *CM_DATASET_INPUT_DTYPE*: `fp32` - - *CM_DATASET_QUANTIZE*: `1` - - *CM_DATASET_CONVERT_TO_UNSIGNED*: `0` - - Workflow: - * `_uint8` - - Environment variables: - - *CM_DATASET_DTYPE*: `uint8` - - *CM_DATASET_INPUT_DTYPE*: `fp32` - - *CM_DATASET_QUANTIZE*: `1` - - *CM_DATASET_CONVERT_TO_UNSIGNED*: `1` - - Workflow: - -
    - - - * Group "**dataset-type**" -
    - Click here to expand this section. - - * `_calibration` - - Environment variables: - - *CM_DATASET_PATH*: `<<>>` - - *CM_DATASET_ANNOTATIONS_FILE_PATH*: `<<>>` - - *CM_DATASET_TYPE*: `calibration` - - Workflow: - * **`_validation`** (default) - - Environment variables: - - *CM_DATASET_TYPE*: `validation` - - Workflow: - -
    - - - * Group "**extension**" -
    - Click here to expand this section. - - * `_npy` - - Environment variables: - - *CM_DATASET_PREPROCESSED_EXTENSION*: `npy` - - Workflow: - * `_raw` - - Environment variables: - - *CM_DATASET_PREPROCESSED_EXTENSION*: `raw` - - Workflow: - * `_rgb32` - - Environment variables: - - *CM_DATASET_PREPROCESSED_EXTENSION*: `rgb32` - - Workflow: - * `_rgb8` - - Environment variables: - - *CM_DATASET_PREPROCESSED_EXTENSION*: `rgb8` - - Workflow: - -
    - - - * Group "**filter-size**" -
    - Click here to expand this section. - - * `_filter-size.#` - - Workflow: - -
    - - - * Group "**interpolation-method**" -
    - Click here to expand this section. - - * `_inter.area` - - Environment variables: - - *CM_DATASET_INTERPOLATION_METHOD*: `INTER_AREA` - - Workflow: - * `_inter.linear` - - Environment variables: - - *CM_DATASET_INTERPOLATION_METHOD*: `INTER_LINEAR` - - Workflow: - -
    - - - * Group "**preprocessing-source**" -
    - Click here to expand this section. - - * `_generic-preprocessor` - - Environment variables: - - *CM_DATASET_REFERENCE_PREPROCESSOR*: `0` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_torch - * CM names: `--adr.['torch', 'pytorch']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torchvision - * CM names: `--adr.['torchvision']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. ***Read "prehook_deps" on other CM scripts*** - * get,generic,image-preprocessor - - CM script: [get-preprocesser-script-generic](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocesser-script-generic) - * **`_mlcommons-reference-preprocessor`** (default) - - Environment variables: - - *CM_DATASET_REFERENCE_PREPROCESSOR*: `1` - - Workflow: - -
    - - -#### Default variations - -`_50,_NCHW,_default-annotations,_fp32,_mlcommons-reference-preprocessor,_validation` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--dir=value` → `CM_DATASET_PREPROCESSED_PATH=value` -* `--threads=value` → `CM_NUM_PREPROCESS_THREADS=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "dir":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_DATASET: `OPENIMAGES` -* CM_DATASET_DTYPE: `fp32` -* CM_DATASET_INPUT_SQUARE_SIDE: `800` -* CM_DATASET_CROP_FACTOR: `100.0` -* CM_DATASET_QUANT_SCALE: `1` -* CM_DATASET_QUANTIZE: `0` -* CM_DATASET_QUANT_OFFSET: `0` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages/_cm.json)*** - * get,python3 - * CM names: `--adr.['python3', 'python']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,dataset,object-detection,openimages,original - * CM names: `--adr.['original-dataset']...` - - CM script: [get-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages) - * mlperf,mlcommons,inference,source,src - * CM names: `--adr.['inference-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * get,generic-python-lib,_pycocotools - * CM names: `--adr.['pycocotools']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_opencv-python - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_pillow - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.ujson - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_numpy - * CM names: `--adr.['numpy']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_numpy - * CM names: `--adr.['numpy']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. 
***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openimages/_cm.json) - -___ -### Script output -`cmr "get dataset openimages open-images object-detection preprocessed [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_DATASET_*` -#### New environment keys auto-detected from customize - -* `CM_DATASET_ANNOTATIONS_DIR_PATH` -* `CM_DATASET_ANNOTATIONS_FILE_PATH` -* `CM_DATASET_PREPROCESSED_IMAGENAMES_LIST` -* `CM_DATASET_PREPROCESSED_IMAGES_LIST` -* `CM_DATASET_PREPROCESSED_PATH` -* `CM_DATASET_QUANT_OFFSET` -* `CM_DATASET_QUANT_SCALE` -* `CM_DATASET_TYPE` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-openorca/index.md b/docs/AI-ML-datasets/get-preprocessed-dataset-openorca/index.md deleted file mode 100644 index cd4e07dd9..000000000 --- a/docs/AI-ML-datasets/get-preprocessed-dataset-openorca/index.md +++ /dev/null @@ -1,178 +0,0 @@ -Automatically generated README for this automation recipe: **get-preprocessed-dataset-openorca** - 
-Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-preprocessed-dataset-openorca,5614c39cb1564d72) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openorca)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,dataset,openorca,language-processing,preprocessed* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get dataset openorca language-processing preprocessed" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,dataset,openorca,language-processing,preprocessed` - -`cm run script --tags=get,dataset,openorca,language-processing,preprocessed[,variations] ` - -*or* - -`cmr "get dataset openorca language-processing preprocessed"` - -`cmr "get dataset openorca language-processing preprocessed [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started 
Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,dataset,openorca,language-processing,preprocessed' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,dataset,openorca,language-processing,preprocessed"``` - -#### Run this script via Docker (beta) - -`cm docker script "get dataset openorca language-processing preprocessed[variations]" ` - -___ -### Customization - - -#### Variations - - * Group "**dataset-type**" -
    - Click here to expand this section. - - * `_calibration` - - Environment variables: - - *CM_DATASET_CALIBRATION*: `yes` - - Workflow: - * **`_validation`** (default) - - Environment variables: - - *CM_DATASET_CALIBRATION*: `no` - - Workflow: - -
    - - - * Group "**size**" -
    - Click here to expand this section. - - * **`_60`** (default) - - Workflow: - * `_full` - - Workflow: - * `_size.#` - - Workflow: - -
    - - -#### Default variations - -`_60,_validation` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_DATASET_CALIBRATION: `no` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openorca/_cm.json)*** - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,dataset,original,openorca - * CM names: `--adr.['openorca-original', 'dataset-original']...` - - CM script: [get-dataset-openorca](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openorca) - * mlperf,inference,source - * CM names: `--adr.['inference-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * get,generic-python-lib,_package.pyarrow - * CM names: `--adr.['pyarrow']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.fastparquet - * CM names: `--adr.['fastparquet']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,ml-model,llama2 - - CM script: [get-ml-model-llama2](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-llama2) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openorca/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openorca/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openorca/run.sh) - 1. 
Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openorca/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openorca/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-openorca/_cm.json) - -___ -### Script output -`cmr "get dataset openorca language-processing preprocessed [,variations]" -j` -#### New environment keys (filter) - -* `CM_DATASET_PREPROCESSED_PATH` -#### New environment keys auto-detected from customize - -* `CM_DATASET_PREPROCESSED_PATH` \ No newline at end of file diff --git a/docs/AI-ML-datasets/get-preprocessed-dataset-squad/index.md b/docs/AI-ML-datasets/get-preprocessed-dataset-squad/index.md deleted file mode 100644 index c7d80cfd0..000000000 --- a/docs/AI-ML-datasets/get-preprocessed-dataset-squad/index.md +++ /dev/null @@ -1,238 +0,0 @@ -Automatically generated README for this automation recipe: **get-preprocessed-dataset-squad** - -Category: **AI/ML datasets** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-preprocessed-dataset-squad,7cd1d9b7e8af4788) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *get,dataset,preprocessed,tokenized,squad* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get dataset preprocessed tokenized squad" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,dataset,preprocessed,tokenized,squad` - -`cm run script --tags=get,dataset,preprocessed,tokenized,squad[,variations] ` - -*or* - -`cmr "get dataset preprocessed tokenized squad"` - -`cmr "get dataset preprocessed tokenized squad [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,dataset,preprocessed,tokenized,squad' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,dataset,preprocessed,tokenized,squad"``` - -#### Run this script via Docker (beta) - -`cm docker script "get dataset preprocessed tokenized squad[variations]" ` - -___ -### Customization - - -#### Variations - - * Group "**calibration-set**" -
    - Click here to expand this section. - - * `_calib1` - - Environment variables: - - *CM_DATASET_SQUAD_CALIBRATION_SET*: `one` - - Workflow: - * `_calib2` - - Environment variables: - - *CM_DATASET_SQUAD_CALIBRATION_SET*: `two` - - Workflow: - * **`_no-calib`** (default) - - Environment variables: - - *CM_DATASET_SQUAD_CALIBRATION_SET*: `` - - Workflow: - -
    - - - * Group "**doc-stride**" -
    - Click here to expand this section. - - * `_doc-stride.#` - - Environment variables: - - *CM_DATASET_DOC_STRIDE*: `#` - - Workflow: - * **`_doc-stride.128`** (default) - - Environment variables: - - *CM_DATASET_DOC_STRIDE*: `128` - - Workflow: - -
    - - - * Group "**packing**" -
    - Click here to expand this section. - - * `_packed` - - Environment variables: - - *CM_DATASET_SQUAD_PACKED*: `yes` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,preprocessed,squad,_pickle - - CM script: [get-preprocessed-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-squad) - -
    - - - * Group "**raw**" -
    - Click here to expand this section. - - * `_pickle` - - Environment variables: - - *CM_DATASET_RAW*: `no` - - Workflow: - * **`_raw`** (default) - - Environment variables: - - *CM_DATASET_RAW*: `yes` - - Workflow: - -
    - - - * Group "**seq-length**" -
    - Click here to expand this section. - - * `_seq-length.#` - - Environment variables: - - *CM_DATASET_MAX_SEQ_LENGTH*: `#` - - Workflow: - * **`_seq-length.384`** (default) - - Environment variables: - - *CM_DATASET_MAX_SEQ_LENGTH*: `384` - - Workflow: - -
    - - -#### Default variations - -`_doc-stride.128,_no-calib,_raw,_seq-length.384` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad/_cm.yaml)*** - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,mlperf,inference,src - * CM names: `--adr.['inference-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * get,squad,dataset,original - * CM names: `--adr.['squad-dataset']...` - - CM script: [get-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad) - * get,squad,vocab - * CM names: `--adr.['squad-vocab']...` - - CM script: [get-bert-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-bert-squad-vocab) - * get,generic-python-lib,_package.tokenization - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.transformers - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.tensorflow - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad/_cm.yaml) - 1. 
***Run native script if exists*** - * [run-packed.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad/run-packed.sh) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-preprocessed-dataset-squad/_cm.yaml) - -___ -### Script output -`cmr "get dataset preprocessed tokenized squad [,variations]" -j` -#### New environment keys (filter) - -* `CM_DATASET_SQUAD_TOKENIZED_*` -#### New environment keys auto-detected from customize - -* `CM_DATASET_SQUAD_TOKENIZED_DOC_STRIDE` -* `CM_DATASET_SQUAD_TOKENIZED_INPUT_IDS` -* `CM_DATASET_SQUAD_TOKENIZED_INPUT_MASK` -* `CM_DATASET_SQUAD_TOKENIZED_MAX_QUERY_LENGTH` -* `CM_DATASET_SQUAD_TOKENIZED_MAX_SEQ_LENGTH` -* `CM_DATASET_SQUAD_TOKENIZED_PACKED_FILENAMES_FILE` -* `CM_DATASET_SQUAD_TOKENIZED_PICKLE_FILE` -* `CM_DATASET_SQUAD_TOKENIZED_ROOT` -* `CM_DATASET_SQUAD_TOKENIZED_SEGMENT_IDS` \ No newline at end of file diff --git a/docs/AI-ML-datasets/index.md b/docs/AI-ML-datasets/index.md new file mode 100644 index 000000000..c898f9a89 --- /dev/null +++ b/docs/AI-ML-datasets/index.md @@ -0,0 +1,29 @@ +The AI/ML datasets category contains the following scripts: + +- [get-croissant](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-croissant/README.md) +- [get-dataset-cifar10](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-dataset-cifar10/README.md) +- [get-dataset-cnndm](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-dataset-cnndm/README.md) +- 
[get-dataset-coco](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-dataset-coco/README.md) +- [get-dataset-coco2014](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-dataset-coco2014/README.md) +- [get-dataset-criteo](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-dataset-criteo/README.md) +- [get-dataset-imagenet-aux](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-dataset-imagenet-aux/README.md) +- [get-dataset-imagenet-calibration](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-dataset-imagenet-calibration/README.md) +- [get-dataset-imagenet-helper](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-dataset-imagenet-helper/README.md) +- [get-dataset-imagenet-train](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-dataset-imagenet-train/README.md) +- [get-dataset-imagenet-val](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-dataset-imagenet-val/README.md) +- [get-dataset-kits19](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-dataset-kits19/README.md) +- [get-dataset-librispeech](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-dataset-librispeech/README.md) +- [get-dataset-openimages](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-dataset-openimages/README.md) +- [get-dataset-openimages-annotations](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-dataset-openimages-annotations/README.md) +- [get-dataset-openimages-calibration](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-dataset-openimages-calibration/README.md) +- [get-dataset-openorca](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-dataset-openorca/README.md) +- 
[get-dataset-squad](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-dataset-squad/README.md) +- [get-dataset-squad-vocab](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-dataset-squad-vocab/README.md) +- [get-preprocessed-dataset-criteo](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-preprocessed-dataset-criteo/README.md) +- [get-preprocessed-dataset-generic](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-preprocessed-dataset-generic/README.md) +- [get-preprocessed-dataset-imagenet](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-preprocessed-dataset-imagenet/README.md) +- [get-preprocessed-dataset-kits19](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-preprocessed-dataset-kits19/README.md) +- [get-preprocessed-dataset-librispeech](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-preprocessed-dataset-librispeech/README.md) +- [get-preprocessed-dataset-openimages](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-preprocessed-dataset-openimages/README.md) +- [get-preprocessed-dataset-openorca](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-preprocessed-dataset-openorca/README.md) +- [get-preprocessed-dataset-squad](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-preprocessed-dataset-squad/README.md) diff --git a/docs/AI-ML-frameworks/get-google-saxml/index.md b/docs/AI-ML-frameworks/get-google-saxml/index.md deleted file mode 100644 index 5a7e3d351..000000000 --- a/docs/AI-ML-frameworks/get-google-saxml/index.md +++ /dev/null @@ -1,133 +0,0 @@ -Automatically generated README for this automation recipe: **get-google-saxml** - -Category: **AI/ML frameworks** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and 
Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-google-saxml,5d7b17d84b5a48fb) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *get,google,saxml* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get google saxml" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,google,saxml` - -`cm run script --tags=get,google,saxml ` - -*or* - -`cmr "get google saxml"` - -`cmr "get google saxml " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,google,saxml' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,google,saxml"``` - -#### Run this script via Docker (beta) - -`cm docker script "get google saxml" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `master` - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,python3 - * CM names: `--adr.['python3', 'python']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,git,_repo.https://github.com/google/saxml - * CM names: `--adr.['google-saxml-git-src']...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - * get,bazel - * CM names: `--adr.['bazel']...` - - CM script: [get-bazel](https://github.com/mlcommons/cm4mlops/tree/master/script/get-bazel) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml/_cm.yaml) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-saxml/_cm.yaml) - -___ -### Script output -`cmr "get google saxml " -j` -#### New environment keys (filter) - -* `CM_GOOGLE_SAXML*` -#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-frameworks/get-onnxruntime-prebuilt/index.md b/docs/AI-ML-frameworks/get-onnxruntime-prebuilt/index.md deleted file mode 100644 index 20419da08..000000000 --- a/docs/AI-ML-frameworks/get-onnxruntime-prebuilt/index.md +++ /dev/null @@ -1,157 +0,0 @@ -Automatically generated README for this automation recipe: **get-onnxruntime-prebuilt** - -Category: **AI/ML frameworks** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-onnxruntime-prebuilt,be02c84ff57c4244) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *install,onnxruntime,get,prebuilt,lib,lang-c,lang-cpp* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install onnxruntime get prebuilt lib lang-c lang-cpp" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,onnxruntime,get,prebuilt,lib,lang-c,lang-cpp` - -`cm run script --tags=install,onnxruntime,get,prebuilt,lib,lang-c,lang-cpp[,variations] ` - -*or* - -`cmr "install onnxruntime get prebuilt lib lang-c lang-cpp"` - -`cmr "install onnxruntime get prebuilt lib lang-c lang-cpp [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'install,onnxruntime,get,prebuilt,lib,lang-c,lang-cpp' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,onnxruntime,get,prebuilt,lib,lang-c,lang-cpp"``` - -#### Run this script via Docker (beta) - -`cm docker script "install onnxruntime get prebuilt lib lang-c lang-cpp[variations]" ` - -___ -### Customization - - -#### Variations - - * Group "**device**" -
    - Click here to expand this section. - - * **`_cpu`** (default) - - Environment variables: - - *CM_ONNXRUNTIME_DEVICE*: `` - - Workflow: - * `_cuda` - - Environment variables: - - *CM_ONNXRUNTIME_DEVICE*: `gpu` - - Workflow: - -
    - - -#### Default variations - -`_cpu` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `1.16.3` - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-onnxruntime-prebuilt/_cm.json) - -___ -### Script output -`cmr "install onnxruntime get prebuilt lib lang-c lang-cpp [,variations]" -j` -#### New environment keys (filter) - -* `+CPLUS_INCLUDE_PATH` -* `+C_INCLUDE_PATH` -* `+DYLD_FALLBACK_LIBRARY_PATH` -* `+LD_LIBRARY_PATH` -* `+PATH` -* `CM_ONNXRUNTIME_INCLUDE_PATH` -* `CM_ONNXRUNTIME_LIB_PATH` -#### New environment keys auto-detected from customize - -* `CM_ONNXRUNTIME_INCLUDE_PATH` -* `CM_ONNXRUNTIME_LIB_PATH` \ No newline at end of file diff --git a/docs/AI-ML-frameworks/get-qaic-apps-sdk/index.md b/docs/AI-ML-frameworks/get-qaic-apps-sdk/index.md deleted file mode 100644 index 836595396..000000000 --- a/docs/AI-ML-frameworks/get-qaic-apps-sdk/index.md +++ /dev/null @@ -1,124 +0,0 @@ -Automatically generated README for this automation recipe: **get-qaic-apps-sdk** - -Category: **AI/ML frameworks** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-qaic-apps-sdk,0a9e206af6764da9) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-apps-sdk)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,detect,qaic,apps,sdk,apps-sdk,qaic-apps-sdk* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get detect qaic apps sdk apps-sdk qaic-apps-sdk" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,detect,qaic,apps,sdk,apps-sdk,qaic-apps-sdk` - -`cm run script --tags=get,detect,qaic,apps,sdk,apps-sdk,qaic-apps-sdk ` - -*or* - -`cmr "get detect qaic apps sdk apps-sdk qaic-apps-sdk"` - -`cmr "get detect qaic apps sdk apps-sdk qaic-apps-sdk " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,detect,qaic,apps,sdk,apps-sdk,qaic-apps-sdk' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,detect,qaic,apps,sdk,apps-sdk,qaic-apps-sdk"``` - -#### Run this script via Docker (beta) - -`cm docker script "get detect qaic apps sdk apps-sdk qaic-apps-sdk" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-apps-sdk/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-apps-sdk/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-apps-sdk/_cm.json) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-apps-sdk/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-apps-sdk/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-apps-sdk/_cm.json) - -___ -### Script output -`cmr "get detect qaic apps sdk apps-sdk qaic-apps-sdk " -j` -#### New environment keys (filter) - -* `+CPLUS_INCLUDE_PATH` -* `+C_INCLUDE_PATH` -* `+DYLD_FALLBACK_LIBRARY_PATH` -* `+LD_LIBRARY_PATH` -* `+PATH` -* `CM_QAIC_EXEC_PATH` -#### New environment keys auto-detected from customize - -* `CM_QAIC_EXEC_PATH` \ No newline at end of file diff --git a/docs/AI-ML-frameworks/get-qaic-platform-sdk/index.md b/docs/AI-ML-frameworks/get-qaic-platform-sdk/index.md deleted file mode 100644 index f712c9859..000000000 --- a/docs/AI-ML-frameworks/get-qaic-platform-sdk/index.md +++ /dev/null @@ -1,128 +0,0 @@ -Automatically generated README for this automation recipe: **get-qaic-platform-sdk** - -Category: **AI/ML frameworks** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-qaic-platform-sdk,a60f86918dc9457d) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-platform-sdk)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,detect,qaic,platform,sdk,platform-sdk,qaic-platform-sdk* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get detect qaic platform sdk platform-sdk qaic-platform-sdk" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,detect,qaic,platform,sdk,platform-sdk,qaic-platform-sdk` - -`cm run script --tags=get,detect,qaic,platform,sdk,platform-sdk,qaic-platform-sdk ` - -*or* - -`cmr "get detect qaic platform sdk platform-sdk qaic-platform-sdk"` - -`cmr "get detect qaic platform sdk platform-sdk qaic-platform-sdk " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,detect,qaic,platform,sdk,platform-sdk,qaic-platform-sdk' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,detect,qaic,platform,sdk,platform-sdk,qaic-platform-sdk"``` - -#### Run this script via Docker (beta) - -`cm docker script "get detect qaic platform sdk platform-sdk qaic-platform-sdk" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-platform-sdk/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-platform-sdk/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-platform-sdk/_cm.json) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-platform-sdk/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-platform-sdk/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-platform-sdk/_cm.json) - -___ -### Script output -`cmr "get detect qaic platform sdk platform-sdk qaic-platform-sdk " -j` -#### New environment keys (filter) - -* `+CPLUS_INCLUDE_PATH` -* `+C_INCLUDE_PATH` -* `+DYLD_FALLBACK_LIBRARY_PATH` -* `+LD_LIBRARY_PATH` -* `+PATH` -* `CM_QAIC_RUNNER_PATH` -* `CM_QAIC_TOOLS_PATH` -#### New environment keys auto-detected from customize - -* `CM_QAIC_RUNNER_PATH` -* `CM_QAIC_TOOLS_PATH` \ No newline at end of file diff --git a/docs/AI-ML-frameworks/get-qaic-software-kit/index.md b/docs/AI-ML-frameworks/get-qaic-software-kit/index.md deleted file mode 100644 index 62ab27a7c..000000000 --- a/docs/AI-ML-frameworks/get-qaic-software-kit/index.md +++ /dev/null @@ -1,176 +0,0 @@ -Automatically generated README for this automation recipe: **get-qaic-software-kit** - -Category: **AI/ML frameworks** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and 
Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-qaic-software-kit,3344655922694bbb) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-software-kit)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,qaic,software,kit,qaic-software-kit* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get qaic software kit qaic-software-kit" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,qaic,software,kit,qaic-software-kit` - -`cm run script --tags=get,qaic,software,kit,qaic-software-kit[,variations] ` - -*or* - -`cmr "get qaic software kit qaic-software-kit"` - -`cmr "get qaic software kit qaic-software-kit [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,qaic,software,kit,qaic-software-kit' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,qaic,software,kit,qaic-software-kit"``` - -#### Run this script via Docker (beta) - -`cm docker script "get qaic software kit qaic-software-kit[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_branch.#` - - Environment variables: - - *CM_GIT_CHECKOUT*: `#` - - Workflow: - -
    - - - * Group "**repo-source**" -
    - Click here to expand this section. - - * `_repo.#` - - Environment variables: - - *CM_GIT_URL*: `#` - - Workflow: - * **`_repo.quic`** (default) - - Environment variables: - - *CM_GIT_URL*: `https://github.com/quic/software-kit-for-qualcomm-cloud-ai-100` - - Workflow: - -
    - - -#### Default variations - -`_repo.quic` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-software-kit/_cm.json)*** - * get,git,repo - * CM names: `--adr.['qaic-software-git-repo']...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - * get,generic,sys-util,_libudev-dev - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,generic,sys-util,_libpci-dev - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,google,test - - CM script: [get-google-test](https://github.com/mlcommons/cm4mlops/tree/master/script/get-google-test) - * get,cmake - * CM names: `--adr.['cmake']...` - - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) - * get,compiler - * CM names: `--adr.['compiler']...` - - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) - - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) - - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-software-kit/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-software-kit/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-software-kit/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-software-kit/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-software-kit/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-qaic-software-kit/_cm.json) - -___ -### Script output -`cmr "get qaic software kit qaic-software-kit [,variations]" -j` -#### New environment keys (filter) - -* `+PATH` -* `CM_QAIC_RUNNER_PATH` -* `CM_QAIC_SOFTWARE_KIT_PATH` -#### New environment keys auto-detected from customize - -* `CM_QAIC_RUNNER_PATH` -* `CM_QAIC_SOFTWARE_KIT_PATH` \ No newline at end of file diff --git a/docs/AI-ML-frameworks/get-rocm/index.md b/docs/AI-ML-frameworks/get-rocm/index.md deleted file mode 100644 index ed5e7b629..000000000 --- a/docs/AI-ML-frameworks/get-rocm/index.md +++ /dev/null @@ -1,126 +0,0 @@ -Automatically generated README for this automation recipe: **get-rocm** - -Category: **AI/ML frameworks** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-rocm,23a69f9477cb4dab) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rocm)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,rocm,get-rocm* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get rocm get-rocm" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,rocm,get-rocm` - -`cm run script --tags=get,rocm,get-rocm ` - -*or* - -`cmr "get rocm get-rocm"` - -`cmr "get rocm get-rocm " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,rocm,get-rocm' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,rocm,get-rocm"``` - -#### Run this script via Docker (beta) - -`cm docker script "get rocm get-rocm" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rocm/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rocm/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rocm/_cm.json)*** - * install,rocm - * Enable this dependency only if all ENV vars are set:
    -`{'CM_REQUIRE_INSTALL': ['yes']}` - - CM script: [install-rocm](https://github.com/mlcommons/cm4mlops/tree/master/script/install-rocm) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rocm/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rocm/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rocm/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rocm/_cm.json) - -___ -### Script output -`cmr "get rocm get-rocm " -j` -#### New environment keys (filter) - -* `+PATH` -* `CM_ROCM_*` -#### New environment keys auto-detected from customize - -* `CM_ROCM_CACHE_TAGS` -* `CM_ROCM_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/AI-ML-frameworks/get-tvm/index.md b/docs/AI-ML-frameworks/get-tvm/index.md deleted file mode 100644 index af40c0419..000000000 --- a/docs/AI-ML-frameworks/get-tvm/index.md +++ /dev/null @@ -1,198 +0,0 @@ -Automatically generated README for this automation recipe: **get-tvm** - -Category: **AI/ML frameworks** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-tvm,93c89140e6224f4b) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): 
*get,tvm,get-tvm* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get tvm get-tvm" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,tvm,get-tvm` - -`cm run script --tags=get,tvm,get-tvm[,variations] ` - -*or* - -`cmr "get tvm get-tvm"` - -`cmr "get tvm get-tvm [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,tvm,get-tvm' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,tvm,get-tvm"``` - -#### Run this script via Docker (beta) - -`cm docker script "get tvm get-tvm[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_cuda` - - Environment variables: - - *CM_TVM_USE_CUDA*: `yes` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,cuda - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * `_openmp` - - Environment variables: - - *CM_TVM_USE_OPENMP*: `yes` - - Workflow: - -
    - - - * Group "**installation-type**" -
    - Click here to expand this section. - - * **`_llvm`** (default) - - Environment variables: - - *CM_TVM_USE_LLVM*: `yes` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,llvm - * CM names: `--adr.['llvm']...` - - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) - * `_pip-install` - - Environment variables: - - *CM_TVM_PIP_INSTALL*: `yes` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_apache-tvm - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - -
    - - -#### Default variations - -`_llvm` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_GIT_CHECKOUT: `main` -* CM_GIT_URL: `https://github.com/apache/tvm` -* CM_TVM_PIP_INSTALL: `no` - -
    - -#### Versions -* `main` -* `v0.10.0` -* `v0.7.0` -* `v0.8.0` -* `v0.9.0` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm/_cm.json)*** - * cmake,get-cmake - - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,generic-python-lib,_typing_extensions - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_decorator - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_scipy - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_attrs - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_psutil - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm/_cm.json) - -___ -### Script output -`cmr "get tvm get-tvm [,variations]" -j` -#### New environment keys (filter) - -* `+DYLD_FALLBACK_LIBRARY_PATH` -* `+LD_LIBRARY_PATH` -* `+PYTHONPATH` -* `CM_TVM_*` -* `TVM_HOME` -#### New environment keys auto-detected from customize - -* `CM_TVM_PATH_INCLUDE` -* `CM_TVM_PATH_LIB` \ No newline at end of file diff --git a/docs/AI-ML-frameworks/index.md b/docs/AI-ML-frameworks/index.md new file mode 100644 index 000000000..1ffe87881 --- /dev/null +++ b/docs/AI-ML-frameworks/index.md @@ -0,0 +1,14 @@ +The AI/ML frameworks category contains the following scripts: + +- [get-google-saxml](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-google-saxml/README.md) +- [get-onnxruntime-prebuilt](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-onnxruntime-prebuilt/README.md) +- [get-qaic-apps-sdk](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-qaic-apps-sdk/README.md) +- [get-qaic-platform-sdk](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-qaic-platform-sdk/README.md) +- [get-qaic-software-kit](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-qaic-software-kit/README.md) +- [get-rocm](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-rocm/README.md) +- [get-tvm](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-tvm/README.md) +- [install-qaic-compute-sdk-from-src](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-qaic-compute-sdk-from-src/README.md) +- [install-rocm](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-rocm/README.md) +- [install-tensorflow-for-c](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-tensorflow-for-c/README.md) +- 
[install-tensorflow-from-src](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-tensorflow-from-src/README.md) +- [install-tflite-from-src](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-tflite-from-src/README.md) diff --git a/docs/AI-ML-frameworks/install-qaic-compute-sdk-from-src/index.md b/docs/AI-ML-frameworks/install-qaic-compute-sdk-from-src/index.md deleted file mode 100644 index b8895826d..000000000 --- a/docs/AI-ML-frameworks/install-qaic-compute-sdk-from-src/index.md +++ /dev/null @@ -1,199 +0,0 @@ -Automatically generated README for this automation recipe: **install-qaic-compute-sdk-from-src** - -Category: **AI/ML frameworks** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-qaic-compute-sdk-from-src,9701bdda97fa4045) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-qaic-compute-sdk-from-src)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,qaic,from.src,software,compute,compute-sdk,qaic-compute-sdk,sdk* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get qaic from.src software compute compute-sdk qaic-compute-sdk sdk" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,qaic,from.src,software,compute,compute-sdk,qaic-compute-sdk,sdk` - -`cm run script --tags=get,qaic,from.src,software,compute,compute-sdk,qaic-compute-sdk,sdk[,variations] ` - -*or* - -`cmr "get qaic from.src software compute compute-sdk qaic-compute-sdk sdk"` - -`cmr "get qaic from.src software compute compute-sdk qaic-compute-sdk sdk [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,qaic,from.src,software,compute,compute-sdk,qaic-compute-sdk,sdk' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,qaic,from.src,software,compute,compute-sdk,qaic-compute-sdk,sdk"``` - -#### Run this script via Docker (beta) - -`cm docker script "get qaic from.src software compute compute-sdk qaic-compute-sdk sdk[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_branch.#` - - Environment variables: - - *CM_GIT_CHECKOUT*: `#` - - Workflow: - -
    - - - * Group "**installation-mode**" -
    - Click here to expand this section. - - * `_debug` - - Environment variables: - - *CM_QAIC_COMPUTE_SDK_INSTALL_MODE*: `debug` - - Workflow: - * **`_release`** (default) - - Environment variables: - - *CM_QAIC_COMPUTE_SDK_INSTALL_MODE*: `release` - - Workflow: - * `_release-assert` - - Environment variables: - - *CM_QAIC_COMPUTE_SDK_INSTALL_MODE*: `release-assert` - - Workflow: - -
    - - - * Group "**repo-source**" -
    - Click here to expand this section. - - * `_repo.#` - - Environment variables: - - *CM_GIT_URL*: `#` - - Workflow: - * **`_repo.quic`** (default) - - Environment variables: - - *CM_GIT_URL*: `https://github.com/quic/software-kit-for-qualcomm-cloud-ai-100-cc` - - Workflow: - -
    - - -#### Default variations - -`_release,_repo.quic` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-qaic-compute-sdk-from-src/_cm.json)*** - * get,git,repo,_repo.https://github.com/quic/software-kit-for-qualcomm-cloud-ai-100-cc - * CM names: `--adr.['qaic-software-git-repo']...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - * get,cmake - * CM names: `--adr.['cmake']...` - - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) - * get,llvm,_from-src - * CM names: `--adr.['llvm']...` - - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) - * get,generic,sys-util,_libudev-dev - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,generic,sys-util,_libpci-dev - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,google,test - - CM script: [get-google-test](https://github.com/mlcommons/cm4mlops/tree/master/script/get-google-test) - * get,generic-sys-util,_ninja-build - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,generic-sys-util,_rsync - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * download-and-extract,_extract,_url.https://codelinaro.jfrog.io/artifactory/codelinaro-toolchain-for-hexagon/v15.0.5/clang+llvm-15.0.5-cross-hexagon-unknown-linux-musl.tar.xz - * CM names: `--adr.['dae']...` - - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-qaic-compute-sdk-from-src/customize.py)*** - 1. 
Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-qaic-compute-sdk-from-src/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-qaic-compute-sdk-from-src/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-qaic-compute-sdk-from-src/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-qaic-compute-sdk-from-src/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-qaic-compute-sdk-from-src/_cm.json) - -___ -### Script output -`cmr "get qaic from.src software compute compute-sdk qaic-compute-sdk sdk [,variations]" -j` -#### New environment keys (filter) - -* `+PATH` -* `CM_QAIC_COMPUTE_SDK_PATH` -#### New environment keys auto-detected from customize - -* `CM_QAIC_COMPUTE_SDK_PATH` \ No newline at end of file diff --git a/docs/AI-ML-frameworks/install-rocm/index.md b/docs/AI-ML-frameworks/install-rocm/index.md deleted file mode 100644 index 019cd2cd6..000000000 --- a/docs/AI-ML-frameworks/install-rocm/index.md +++ /dev/null @@ -1,129 +0,0 @@ -Automatically generated README for this automation recipe: **install-rocm** - -Category: **AI/ML frameworks** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-rocm,9d13f90463ce4545) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm)* -* CM meta 
description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *install,rocm,install-rocm* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install rocm install-rocm" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,rocm,install-rocm` - -`cm run script --tags=install,rocm,install-rocm ` - -*or* - -`cmr "install rocm install-rocm"` - -`cmr "install rocm install-rocm " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'install,rocm,install-rocm' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,rocm,install-rocm"``` - -#### Run this script via Docker (beta) - -`cm docker script "install rocm install-rocm" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `5.7.1` - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/_cm.json) - 1. ***Run native script if exists*** - * [run-rhel.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/run-rhel.sh) - * [run-ubuntu.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/run-ubuntu.sh) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-rocm/_cm.json) - -___ -### Script output -`cmr "install rocm install-rocm " -j` -#### New environment keys (filter) - -* `+PATH` -* `CM_ROCM_*` -#### New environment keys auto-detected from customize - -* `CM_ROCM_BIN_WITH_PATH` -* `CM_ROCM_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/AI-ML-frameworks/install-tensorflow-for-c/index.md b/docs/AI-ML-frameworks/install-tensorflow-for-c/index.md deleted file mode 100644 index 845aae451..000000000 --- a/docs/AI-ML-frameworks/install-tensorflow-for-c/index.md +++ /dev/null @@ -1,122 +0,0 @@ -Automatically generated README for this automation recipe: **install-tensorflow-for-c** - -Category: **AI/ML frameworks** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-tensorflow-for-c,d73783d8302547d7) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-for-c)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *install,tensorflow,lib,lang-c* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install tensorflow lib lang-c" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,tensorflow,lib,lang-c` - -`cm run script --tags=install,tensorflow,lib,lang-c ` - -*or* - -`cmr "install tensorflow lib lang-c"` - -`cmr "install tensorflow lib lang-c " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'install,tensorflow,lib,lang-c' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,tensorflow,lib,lang-c"``` - -#### Run this script via Docker (beta) - -`cm docker script "install tensorflow lib lang-c" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `2.8.0` - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-for-c/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-for-c/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-for-c/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-for-c/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-for-c/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-for-c/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-for-c/_cm.json) - -___ -### Script output -`cmr "install tensorflow lib lang-c " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-frameworks/install-tensorflow-from-src/index.md b/docs/AI-ML-frameworks/install-tensorflow-from-src/index.md deleted file mode 100644 index 4421e0df6..000000000 --- a/docs/AI-ML-frameworks/install-tensorflow-from-src/index.md +++ /dev/null @@ -1,165 +0,0 @@ -Automatically generated README for this automation recipe: **install-tensorflow-from-src** - -Category: **AI/ML frameworks** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-tensorflow-from-src,a974533c4c854597) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-from-src)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,install,tensorflow,lib,source,from-source,from-src,src,from.src* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get install tensorflow lib source from-source from-src src from.src" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,install,tensorflow,lib,source,from-source,from-src,src,from.src` - -`cm run script --tags=get,install,tensorflow,lib,source,from-source,from-src,src,from.src[,variations] ` - -*or* - -`cmr "get install tensorflow lib source from-source from-src src from.src"` - -`cmr "get install tensorflow lib source from-source from-src src from.src [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,install,tensorflow,lib,source,from-source,from-src,src,from.src' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,install,tensorflow,lib,source,from-source,from-src,src,from.src"``` - -#### Run this script via Docker (beta) - -`cm docker script "get install tensorflow lib source from-source from-src src from.src[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_tflite` - - Environment variables: - - *CM_TFLITE*: `on` - - Workflow: - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_GIT_URL: `https://github.com/tensorflow/tensorflow` -* CM_GIT_DEPTH: `1` -* CM_TFLITE: `off` - -
    - -#### Versions -Default version: `master` - -* `master` -* `v1.15.0` -* `v2.0.0` -* `v2.1.0` -* `v2.2.0` -* `v2.3.0` -* `v2.4.0` -* `v2.5.0` -* `v2.6.0` -* `v2.7.0` -* `v2.8.0` -* `v2.9.0` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-from-src/_cm.json)*** - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,generic-sys-util,_zlib - * Enable this dependency only if all ENV vars are set:
    -`{'CM_HOST_OS_FLAVOR': ['ubuntu'], 'CM_HOST_OS_VERSION': ['18.04']}` - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,generic-python-lib,_package.numpy - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-from-src/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-from-src/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-from-src/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-from-src/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-from-src/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tensorflow-from-src/_cm.json) - -___ -### Script output -`cmr "get install tensorflow lib source from-source from-src src from.src [,variations]" -j` -#### New environment keys (filter) - -* `+CPLUS_INCLUDE_PATH` -* `+C_INCLUDE_PATH` -* `+DYLD_FALLBACK_LIBRARY_PATH` -* `+LD_LIBRARY_PATH` -#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-frameworks/install-tflite-from-src/index.md b/docs/AI-ML-frameworks/install-tflite-from-src/index.md deleted file mode 100644 index aa40f96eb..000000000 --- a/docs/AI-ML-frameworks/install-tflite-from-src/index.md +++ /dev/null @@ -1,135 +0,0 @@ -Automatically generated README for this automation recipe: **install-tflite-from-src** - -Category: **AI/ML frameworks** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-tflite-from-src,5c72dab5eb88407c) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tflite-from-src)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,install,tflite-cmake,tensorflow-lite-cmake,from-src* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get install tflite-cmake tensorflow-lite-cmake from-src" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,install,tflite-cmake,tensorflow-lite-cmake,from-src` - -`cm run script --tags=get,install,tflite-cmake,tensorflow-lite-cmake,from-src ` - -*or* - -`cmr "get install tflite-cmake tensorflow-lite-cmake from-src"` - -`cmr "get install tflite-cmake tensorflow-lite-cmake from-src " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,install,tflite-cmake,tensorflow-lite-cmake,from-src' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,install,tflite-cmake,tensorflow-lite-cmake,from-src"``` - -#### Run this script via Docker (beta) - -`cm docker script "get install tflite-cmake tensorflow-lite-cmake from-src" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_GIT_DEPTH: `1` - -
    - -#### Versions -Default version: `master` - -* `master` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tflite-from-src/_cm.json)*** - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,compiler - * CM names: `--adr.['compiler']...` - - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) - - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) - - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) - * get,cmake - - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tflite-from-src/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tflite-from-src/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tflite-from-src/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tflite-from-src/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tflite-from-src/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tflite-from-src/_cm.json) - -___ -### Script output -`cmr "get install tflite-cmake tensorflow-lite-cmake from-src " -j` -#### New environment keys (filter) - -* `+CPLUS_INCLUDE_PATH` -* `+C_INCLUDE_PATH` -* `+DYLD_FALLBACK_LIBRARY_PATH` -* `+LD_LIBRARY_PATH` -#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-models/convert-ml-model-huggingface-to-onnx/index.md b/docs/AI-ML-models/convert-ml-model-huggingface-to-onnx/index.md deleted file mode 100644 index 4c409f992..000000000 --- a/docs/AI-ML-models/convert-ml-model-huggingface-to-onnx/index.md +++ /dev/null @@ -1,143 +0,0 @@ -Automatically generated README for this automation recipe: **convert-ml-model-huggingface-to-onnx** - -Category: **AI/ML models** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=convert-ml-model-huggingface-to-onnx,eacb01655d7e49ac) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-ml-model-huggingface-to-onnx)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *ml-model,model,huggingface-to-onnx,onnx,huggingface,convert* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "ml-model model huggingface-to-onnx onnx huggingface convert" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=ml-model,model,huggingface-to-onnx,onnx,huggingface,convert` - -`cm run script --tags=ml-model,model,huggingface-to-onnx,onnx,huggingface,convert[,variations] ` - -*or* - -`cmr "ml-model model huggingface-to-onnx onnx huggingface convert"` - -`cmr "ml-model model huggingface-to-onnx onnx huggingface convert [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'ml-model,model,huggingface-to-onnx,onnx,huggingface,convert' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="ml-model,model,huggingface-to-onnx,onnx,huggingface,convert"``` - -#### Run this script via Docker (beta) - -`cm docker script "ml-model model huggingface-to-onnx onnx huggingface convert[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_model-path.#` - - Environment variables: - - *CM_MODEL_HUGG_PATH*: `#` - - Workflow: - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-ml-model-huggingface-to-onnx/_cm.json)*** - * get,python3 - * CM names: `--adr.['python3', 'python']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,generic-python-lib,_transformers - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_onnxruntime - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-ml-model-huggingface-to-onnx/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-ml-model-huggingface-to-onnx/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-ml-model-huggingface-to-onnx/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-ml-model-huggingface-to-onnx/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-ml-model-huggingface-to-onnx/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-ml-model-huggingface-to-onnx/_cm.json) - -___ -### Script output -`cmr "ml-model model huggingface-to-onnx onnx huggingface convert [,variations]" -j` -#### New environment keys (filter) - -* `CM_ML_MODEL*` -* `CM_MODEL_HUGG_PATH` -* `HUGGINGFACE_ONNX_FILE_PATH` -#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-models/get-bert-squad-vocab/index.md b/docs/AI-ML-models/get-bert-squad-vocab/index.md deleted file mode 100644 index 3067bcb2e..000000000 --- a/docs/AI-ML-models/get-bert-squad-vocab/index.md +++ /dev/null @@ -1,119 +0,0 @@ -Automatically generated README for this automation recipe: **get-bert-squad-vocab** - -Category: **AI/ML models** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-bert-squad-vocab,2f99a545ce734157) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bert-squad-vocab)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,bert,squad,bert-large,bert-squad,vocab* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get bert squad bert-large bert-squad vocab" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,bert,squad,bert-large,bert-squad,vocab` - -`cm run script --tags=get,bert,squad,bert-large,bert-squad,vocab ` - -*or* - -`cmr "get bert squad bert-large bert-squad vocab"` - -`cmr "get bert squad bert-large bert-squad vocab " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,bert,squad,bert-large,bert-squad,vocab' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,bert,squad,bert-large,bert-squad,vocab"``` - -#### Run this script via Docker (beta) - -`cm docker script "get bert squad bert-large bert-squad vocab" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bert-squad-vocab/_cm.json) - 1. Run "preprocess" function from customize.py - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bert-squad-vocab/_cm.json)*** - * download,file - - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bert-squad-vocab/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bert-squad-vocab/_cm.json) - -___ -### Script output -`cmr "get bert squad bert-large bert-squad vocab " -j` -#### New environment keys (filter) - -* `CM_ML_MODEL_BERT_VOCAB_FILE_WITH_PATH` -#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-models/get-dlrm/index.md b/docs/AI-ML-models/get-dlrm/index.md deleted file mode 100644 index 9bb81a69a..000000000 --- a/docs/AI-ML-models/get-dlrm/index.md +++ /dev/null @@ -1,143 +0,0 @@ -Automatically generated README for this automation recipe: **get-dlrm** - -Category: **AI/ML models** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-dlrm,63680ac2449a4241) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dlrm)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,src,dlrm* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get src dlrm" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,src,dlrm` - -`cm run script --tags=get,src,dlrm[,variations] ` - -*or* - -`cmr "get src dlrm"` - -`cmr "get src dlrm [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,src,dlrm' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,src,dlrm"``` - -#### Run this script via Docker (beta) - -`cm docker script "get src dlrm[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_full-history` - - Environment variables: - - *CM_GIT_DEPTH*: `` - - Workflow: - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_GIT_DEPTH: `--depth 10` -* CM_GIT_PATCH: `no` -* CM_GIT_URL: `https://github.com/facebookresearch/dlrm.git` - -
    - -#### Versions -Default version: `main` - -* `main` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dlrm/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dlrm/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dlrm/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dlrm/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dlrm/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dlrm/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-dlrm/_cm.json) - -___ -### Script output -`cmr "get src dlrm [,variations]" -j` -#### New environment keys (filter) - -* `DLRM_DIR` -#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-models/get-ml-model-3d-unet-kits19/index.md b/docs/AI-ML-models/get-ml-model-3d-unet-kits19/index.md deleted file mode 100644 index 1ae4ae572..000000000 --- a/docs/AI-ML-models/get-ml-model-3d-unet-kits19/index.md +++ /dev/null @@ -1,200 +0,0 @@ -Automatically generated README for this automation recipe: **get-ml-model-3d-unet-kits19** - -Category: **AI/ML models** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-3d-unet-kits19,fb7e31419c0f4226) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-3d-unet-kits19)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,raw,3d-unet,kits19,medical-imaging* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get ml-model raw 3d-unet kits19 medical-imaging" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,ml-model,raw,3d-unet,kits19,medical-imaging` - -`cm run script --tags=get,ml-model,raw,3d-unet,kits19,medical-imaging[,variations] ` - -*or* - -`cmr "get ml-model raw 3d-unet kits19 medical-imaging"` - -`cmr "get ml-model raw 3d-unet kits19 medical-imaging [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,ml-model,raw,3d-unet,kits19,medical-imaging' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,ml-model,raw,3d-unet,kits19,medical-imaging"``` - -#### Run this script via Docker (beta) - -`cm docker script "get ml-model raw 3d-unet kits19 medical-imaging[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_onnx,fp32` - - Environment variables: - - *CM_ML_MODEL_ACCURACY*: `0.86170` - - *CM_PACKAGE_URL*: `https://zenodo.org/record/5597155/files/3dunet_kits19_128x128x128_dynbatch.onnx?download=1` - - Workflow: - * `_pytorch,fp32` - - Environment variables: - - *CM_ML_MODEL_ACCURACY*: `0.86170` - - *CM_PACKAGE_URL*: `https://zenodo.org/record/5597155/files/3dunet_kits19_pytorch.ptc?download=1` - - Workflow: - * `_pytorch,fp32,weights` - - Environment variables: - - *CM_ML_MODEL_ACCURACY*: `0.86170` - - *CM_ML_MODEL_FILE*: `retinanet_model_10.pth` - - *CM_PACKAGE_URL*: `https://zenodo.org/record/5597155/files/3dunet_kits19_pytorch_checkpoint.pth?download=1` - - *CM_UNZIP*: `yes` - - Workflow: - * `_tf,fp32` - - Environment variables: - - *CM_ML_MODEL_ACCURACY*: `0.86170` - - *CM_ML_MODEL_FILE*: `3dunet_kits19_128x128x128.tf` - - *CM_PACKAGE_URL*: `https://zenodo.org/record/5597155/files/3dunet_kits19_128x128x128.tf.zip?download=1` - - *CM_UNZIP*: `yes` - - Workflow: - * `_weights` - - Environment variables: - - *CM_MODEL_WEIGHTS_FILE*: `yes` - - Workflow: - -
    - - - * Group "**framework**" -
    - Click here to expand this section. - - * **`_onnx`** (default) - - Environment variables: - - *CM_ML_MODEL_FRAMEWORK*: `onnx` - - Workflow: - * `_pytorch` - - Environment variables: - - *CM_ML_MODEL_FRAMEWORK*: `pytorch` - - Workflow: - * `_tf` - - Aliases: `_tensorflow` - - Environment variables: - - *CM_ML_MODEL_FRAMEWORK*: `tensorflow` - - Workflow: - -
    - - - * Group "**precision**" -
    - Click here to expand this section. - - * **`_fp32`** (default) - - Environment variables: - - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` - - *CM_ML_MODEL_PRECISION*: `fp32` - - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` - - Workflow: - -
    - - -#### Default variations - -`_fp32,_onnx` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-3d-unet-kits19/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-3d-unet-kits19/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-3d-unet-kits19/_cm.json) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-3d-unet-kits19/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-3d-unet-kits19/_cm.json) - -___ -### Script output -`cmr "get ml-model raw 3d-unet kits19 medical-imaging [,variations]" -j` -#### New environment keys (filter) - -* `CM_ML_MODEL_*` -#### New environment keys auto-detected from customize - -* `CM_ML_MODEL_FILE` -* `CM_ML_MODEL_FILE_WITH_PATH` -* `CM_ML_MODEL_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-bert-base-squad/index.md b/docs/AI-ML-models/get-ml-model-bert-base-squad/index.md deleted file mode 100644 index 28bc15a1b..000000000 --- a/docs/AI-ML-models/get-ml-model-bert-base-squad/index.md +++ /dev/null @@ -1,183 +0,0 @@ -Automatically generated README for this automation recipe: **get-ml-model-bert-base-squad** - -Category: **AI/ML models** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-bert-base-squad,b3b10b452ce24c5f) ]* - ---- -#### Summary - -* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-base-squad)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,raw,bert,bert-base,bert-squad,language,language-processing* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get ml-model raw bert bert-base bert-squad language language-processing" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,ml-model,raw,bert,bert-base,bert-squad,language,language-processing` - -`cm run script --tags=get,ml-model,raw,bert,bert-base,bert-squad,language,language-processing[,variations] ` - -*or* - -`cmr "get ml-model raw bert bert-base bert-squad language language-processing"` - -`cmr "get ml-model raw bert bert-base bert-squad language language-processing [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,ml-model,raw,bert,bert-base,bert-squad,language,language-processing' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,ml-model,raw,bert,bert-base,bert-squad,language,language-processing"``` - -#### Run this script via Docker (beta) - -`cm docker script "get ml-model raw bert bert-base bert-squad language language-processing[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_deepsparse,int8` - - Environment variables: - - *CM_ML_MODEL_F1*: `87.89` - - *CM_ML_MODEL_FILE*: `model.onnx` - - *CM_PRUNING_PERCENTAGE*: `95` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,ml-model,zoo,deepsparse,_pruned95_obs_quant-none - * CM names: `--adr.['neural-magic-zoo-downloader']...` - - *Warning: no scripts found* - -
    - - - * Group "**framework**" -
    - Click here to expand this section. - - * `_deepsparse` - - Environment variables: - - *CM_ML_MODEL_FRAMEWORK*: `deepsparse` - - *CM_ML_MODEL_INPUT_IDS_NAME*: `input_ids` - - *CM_ML_MODEL_INPUT_MASK_NAME*: `input_mask` - - *CM_ML_MODEL_INPUT_SEGMENTS_NAME*: `segment_ids` - - *CM_ML_MODEL_OUTPUT_END_LOGITS_NAME*: `output_end_logits` - - *CM_ML_MODEL_OUTPUT_START_LOGITS_NAME*: `output_start_logits` - - Workflow: - -
    - - - * Group "**precision**" -
    - Click here to expand this section. - - * **`_fp32`** (default) - - Environment variables: - - *CM_ML_MODEL_PRECISION*: `fp32` - - Workflow: - * `_int8` - - Environment variables: - - *CM_ML_MODEL_PRECISION*: `int8` - - *CM_ML_MODEL_QUANTIZED*: `yes` - - Workflow: - -
    - - -#### Default variations - -`_fp32` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-base-squad/_cm.json) - 1. Run "preprocess" function from customize.py - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-base-squad/_cm.json)*** - * download-and-extract - * Enable this dependency only if all ENV vars are set:
    -`{'CM_TMP_ML_MODEL_REQUIRE_DOWNLOAD': 'yes'}` - - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-base-squad/_cm.json) - 1. Run "postrocess" function from customize.py - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-base-squad/_cm.json)*** - * get,bert,squad,vocab - - CM script: [get-bert-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-bert-squad-vocab) - -___ -### Script output -`cmr "get ml-model raw bert bert-base bert-squad language language-processing [,variations]" -j` -#### New environment keys (filter) - -* `CM_ML_MODEL*` -#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-models/get-ml-model-bert-large-squad/index.md b/docs/AI-ML-models/get-ml-model-bert-large-squad/index.md deleted file mode 100644 index df467b7a4..000000000 --- a/docs/AI-ML-models/get-ml-model-bert-large-squad/index.md +++ /dev/null @@ -1,357 +0,0 @@ -Automatically generated README for this automation recipe: **get-ml-model-bert-large-squad** - -Category: **AI/ML models** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-bert-large-squad,5e865dbdc65949d2) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-large-squad)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All 
CM tags to find and reuse this script (see in above meta description): *get,ml-model,raw,bert,bert-large,bert-squad,language,language-processing* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get ml-model raw bert bert-large bert-squad language language-processing" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,ml-model,raw,bert,bert-large,bert-squad,language,language-processing` - -`cm run script --tags=get,ml-model,raw,bert,bert-large,bert-squad,language,language-processing[,variations] ` - -*or* - -`cmr "get ml-model raw bert bert-large bert-squad language language-processing"` - -`cmr "get ml-model raw bert bert-large bert-squad language language-processing [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,ml-model,raw,bert,bert-large,bert-squad,language,language-processing' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,ml-model,raw,bert,bert-large,bert-squad,language,language-processing"``` - -#### Run this script via Docker (beta) - -`cm docker script "get ml-model raw bert bert-large bert-squad language language-processing[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_deepsparse,int8` - - Environment variables: - - *CM_ML_MODEL_F1*: `90.21282641816266` - - *CM_ML_MODEL_FILE*: `oBERT-Large_95sparse_block4_qat.onnx` - - *CM_DAE_EXTRACT_DOWNLOADED*: `yes` - - Workflow: - * `_deepsparse,int8,github` - - Environment variables: - - *CM_PACKAGE_URL*: `https://github.com/mlcommons/inference_results_v2.1/raw/master/open/NeuralMagic/code/bert/deepsparse/models/oBERT-Large_95sparse_block4_qat.onnx.tar.xz` - - Workflow: - * `_onnx,fp32` - - Environment variables: - - *CM_ML_MODEL_F1*: `90.874` - - Workflow: - * `_onnx,fp32,armi` - - Environment variables: - - *CM_PACKAGE_URL*: `https://armi.in/files/model.onnx` - - *CM_PACKAGE_URL1*: `https://zenodo.org/record/3733910/files/model.onnx` - - Workflow: - * `_onnx,fp32,zenodo` - - Environment variables: - - *CM_PACKAGE_URL*: `https://zenodo.org/record/3733910/files/model.onnx` - - Workflow: - * `_onnx,int8` - - Environment variables: - - *CM_ML_MODEL_F1*: `90.067` - - *CM_PACKAGE_URL*: `https://zenodo.org/record/3750364/files/bert_large_v1_1_fake_quant.onnx` - - Workflow: - * `_onnx,int8,amazon-s3` - - Environment variables: - - *CM_PACKAGE_URL*: `https://mlperf-public.s3.us-west-2.amazonaws.com/bert_large_v1_1_fake_quant.onnx` - - Workflow: - * `_onnx,int8,zenodo` - - Environment variables: - - *CM_PACKAGE_URL*: `https://zenodo.org/record/3750364/files/bert_large_v1_1_fake_quant.onnx` - - Workflow: - * `_onnxruntime` - - Workflow: - * `_pytorch,fp32` - - Environment variables: - - *CM_ML_MODEL_F1*: `90.874` - - *CM_DOWNLOAD_CHECKSUM*: `00fbcbfaebfa20d87ac9885120a6e9b4` - - Workflow: - * `_pytorch,fp32,armi` - - Environment variables: - - *CM_PACKAGE_URL*: `https://armi.in/files/fp32/model.pytorch` - - *CM_PACKAGE_URL1*: `https://zenodo.org/record/3733896/files/model.pytorch` - - Workflow: - * `_pytorch,fp32,zenodo` - - Environment variables: - - *CM_PACKAGE_URL*: `https://zenodo.org/record/3733896/files/model.pytorch` - - Workflow: - * 
`_pytorch,int8` - - Environment variables: - - *CM_ML_MODEL_F1*: `90.633` - - Workflow: - * `_pytorch,int8,armi` - - Environment variables: - - *CM_PACKAGE_URL*: `https://armi.in/files/int8/pytorch_model.bin` - - *CM_PACKAGE_URL1*: `https://zenodo.org/record/4792496/files/pytorch_model.bin` - - Workflow: - * `_pytorch,int8,zenodo` - - Environment variables: - - *CM_PACKAGE_URL*: `https://zenodo.org/record/4792496/files/pytorch_model.bin` - - Workflow: - * `_tensorflow` - - Workflow: - * `_tf,fp32` - - Environment variables: - - *CM_ML_MODEL_F1*: `90.874` - - Workflow: - * `_tf,fp32,zenodo` - - Environment variables: - - *CM_PACKAGE_URL*: `https://zenodo.org/record/3939747/files/model.pb` - - Workflow: - -
    - - - * Group "**download-source**" -
    - Click here to expand this section. - - * `_amazon-s3` - - Workflow: - * `_armi` - - Workflow: - * `_custom-url.#` - - Environment variables: - - *CM_PACKAGE_URL*: `#` - - Workflow: - * `_github` - - Workflow: - * `_zenodo` - - Workflow: - -
    - - - * Group "**framework**" -
    - Click here to expand this section. - - * `_deepsparse` - - Environment variables: - - *CM_ML_MODEL_FRAMEWORK*: `deepsparse` - - *CM_ML_MODEL_INPUT_IDS_NAME*: `input_ids` - - *CM_ML_MODEL_INPUT_MASK_NAME*: `input_mask` - - *CM_ML_MODEL_INPUT_SEGMENTS_NAME*: `segment_ids` - - *CM_ML_MODEL_OUTPUT_END_LOGITS_NAME*: `output_end_logits` - - *CM_ML_MODEL_OUTPUT_START_LOGITS_NAME*: `output_start_logits` - - Workflow: - * **`_onnx`** (default) - - Environment variables: - - *CM_ML_MODEL_FRAMEWORK*: `onnx` - - *CM_ML_MODEL_INPUT_IDS_NAME*: `input_ids` - - *CM_ML_MODEL_INPUT_MASK_NAME*: `input_mask` - - *CM_ML_MODEL_INPUT_SEGMENTS_NAME*: `segment_ids` - - *CM_ML_MODEL_OUTPUT_END_LOGITS_NAME*: `output_end_logits` - - *CM_ML_MODEL_OUTPUT_START_LOGITS_NAME*: `output_start_logits` - - Workflow: - * `_pytorch` - - Environment variables: - - *CM_ML_MODEL_FRAMEWORK*: `pytorch` - - *CM_ML_MODEL_INPUT_IDS_NAME*: `input_ids` - - *CM_ML_MODEL_INPUT_MASK_NAME*: `input_mask` - - *CM_ML_MODEL_INPUT_SEGMENTS_NAME*: `segment_ids` - - *CM_ML_MODEL_OUTPUT_END_LOGITS_NAME*: `output_end_logits` - - *CM_ML_MODEL_OUTPUT_START_LOGITS_NAME*: `output_start_logits` - - Workflow: - * `_tf` - - Environment variables: - - *CM_ML_MODEL_FRAMEWORK*: `tf` - - *CM_ML_MODEL_INPUT_IDS_NAME*: `input_ids` - - *CM_ML_MODEL_INPUT_MASK_NAME*: `input_mask` - - *CM_ML_MODEL_INPUT_SEGMENTS_NAME*: `segment_ids` - - *CM_ML_MODEL_OUTPUT_END_LOGITS_NAME*: `output_end_logits` - - *CM_ML_MODEL_OUTPUT_START_LOGITS_NAME*: `output_start_logits` - - Workflow: - -
    - - - * Group "**packing**" -
    - Click here to expand this section. - - * `_packed` - - Environment variables: - - *CM_ML_MODEL_BERT_PACKED*: `yes` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,generic-python-lib,_torch - * CM names: `--adr.['torch', 'pytorch']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.tensorflow - * CM names: `--adr.['tensorflow']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.transformers - * CM names: `--adr.['transformers']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.protobuf - * CM names: `--adr.['protobuf']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.onnx - * CM names: `--adr.['onnx']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_onnx-graphsurgeon - * CM names: `--adr.['onnx-graphsurgeon']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_numpy - * CM names: `--adr.['numpy']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,mlperf,inference,src - * CM names: `--adr.['inference-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - 1. 
***Read "prehook_deps" on other CM scripts*** - * download,file,_wget,_url.https://zenodo.org/record/3733868/files/model.ckpt-5474.data-00000-of-00001 - - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) - * download,file,_wget,_url.https://zenodo.org/record/3733868/files/model.ckpt-5474.index - - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) - * download,file,_wget,_url.https://zenodo.org/record/3733868/files/model.ckpt-5474.meta - - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) - * download,file,_wget,_url.https://zenodo.org/record/3733868/files/vocab.txt - - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) - * download,file,_wget,_url.https://raw.githubusercontent.com/krai/axs2kilt/main/model_onnx_bert_large_packed_recipe/convert_model.py - - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) - * **`_unpacked`** (default) - - Environment variables: - - *CM_ML_MODEL_BERT_PACKED*: `no` - - Workflow: - -
    - - - * Group "**precision**" -
    - Click here to expand this section. - - * **`_fp32`** (default) - - Environment variables: - - *CM_ML_MODEL_PRECISION*: `fp32` - - Workflow: - * `_int8` - - Environment variables: - - *CM_ML_MODEL_PRECISION*: `int8` - - *CM_ML_MODEL_QUANTIZED*: `yes` - - Workflow: - -
    - - -#### Default variations - -`_fp32,_onnx,_unpacked` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-large-squad/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-large-squad/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-large-squad/_cm.json)*** - * download-and-extract - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_ML_MODEL_BERT_PACKED': ['yes']}` - - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) - 1. ***Run native script if exists*** - * [run-packed.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-large-squad/run-packed.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-large-squad/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-large-squad/customize.py)*** - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-bert-large-squad/_cm.json)*** - * get,dataset-aux,squad-vocab - - CM script: [get-dataset-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad-vocab) - -___ -### Script output -`cmr "get ml-model raw bert bert-large bert-squad language language-processing [,variations]" -j` -#### New environment keys (filter) - -* `CM_ML_MODEL*` -#### New environment keys auto-detected from customize - -* `CM_ML_MODEL_BERT_LARGE_FP32_PATH` -* `CM_ML_MODEL_BERT_LARGE_INT8_PATH` -* `CM_ML_MODEL_BERT_PACKED_PATH` -* `CM_ML_MODEL_FILE` -* `CM_ML_MODEL_FILE_WITH_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-dlrm-terabyte/index.md b/docs/AI-ML-models/get-ml-model-dlrm-terabyte/index.md deleted file mode 100644 index cc5c0328a..000000000 --- a/docs/AI-ML-models/get-ml-model-dlrm-terabyte/index.md +++ /dev/null @@ -1,262 +0,0 @@ -Automatically generated README for this automation recipe: **get-ml-model-dlrm-terabyte** - -Category: **AI/ML models** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-dlrm-terabyte,8fa7582c603a4db3) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-dlrm-terabyte)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,dlrm,raw,terabyte,criteo-terabyte,criteo,recommendation* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get ml-model dlrm raw terabyte criteo-terabyte criteo recommendation" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,ml-model,dlrm,raw,terabyte,criteo-terabyte,criteo,recommendation` - -`cm run script --tags=get,ml-model,dlrm,raw,terabyte,criteo-terabyte,criteo,recommendation[,variations] [--input_flags]` - -*or* - -`cmr "get ml-model dlrm raw terabyte criteo-terabyte criteo recommendation"` - -`cmr "get ml-model dlrm raw terabyte criteo-terabyte criteo recommendation [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,ml-model,dlrm,raw,terabyte,criteo-terabyte,criteo,recommendation' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,ml-model,dlrm,raw,terabyte,criteo-terabyte,criteo,recommendation"``` - -#### Run this script via Docker (beta) - -`cm docker script "get ml-model dlrm raw terabyte criteo-terabyte criteo recommendation[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_debug` - - Environment variables: - - *CM_ML_MODEL_DEBUG*: `yes` - - Workflow: - * `_onnx,fp32` - - Environment variables: - - *CM_ML_MODEL_ACCURACY*: `0.8025` - - *CM_PACKAGE_URL*: `https://dlrm.s3-us-west-1.amazonaws.com/models/tb00_40M.onnx.tar` - - *CM_UNTAR*: `yes` - - *CM_ML_MODEL_FILE*: `tb00_40M.onnx` - - *CM_ML_MODEL_DLRM_MAX_INDEX_RANGE*: `40000000` - - Workflow: - * `_onnx,fp32,debug` - - Environment variables: - - *CM_ML_MODEL_ACCURACY*: `0.8107` - - *CM_PACKAGE_URL*: `https://dlrm.s3-us-west-1.amazonaws.com/models/tb0875_10M.onnx.tar` - - *CM_ML_MODEL_DLRM_MAX_INDEX_RANGE*: `10000000` - - *CM_UNTAR*: `yes` - - *CM_ML_MODEL_FILE*: `tb0875_10M.onnx` - - Workflow: - * `_pytorch,fp32` - - Environment variables: - - *CM_ML_MODEL_ACCURACY*: `0.8025` - - *CM_PACKAGE_URL*: `https://dlrm.s3-us-west-1.amazonaws.com/models/tb00_40M.pt` - - *CM_ML_MODEL_DLRM_MAX_INDEX_RANGE*: `40000000` - - *CM_DOWNLOAD_CHECKSUM*: `2d49a5288cddb37c3c64860a06d79bb9` - - Workflow: - * `_pytorch,fp32,debug` - - Environment variables: - - *CM_ML_MODEL_ACCURACY*: `0.8107` - - *CM_PACKAGE_URL*: `https://dlrm.s3-us-west-1.amazonaws.com/models/tb0875_10M.pt` - - *CM_ML_MODEL_DLRM_MAX_INDEX_RANGE*: `10000000` - - Workflow: - * `_pytorch,fp32,weight_sharded` - - Environment variables: - - *CM_ML_MODEL_ACCURACY*: `0.8025` - - *CM_ML_MODEL_DLRM_MAX_INDEX_RANGE*: `40000000` - - *CM_ML_MODEL_FILE*: `model_weights` - - *CM_TMP_MODEL_ADDITIONAL_NAME*: `` - - *CM_DOWNLOAD_CHECKSUM*: `` - - Workflow: - * `_pytorch,fp32,weight_sharded,rclone` - - Environment variables: - - *CM_RCLONE_CONFIG_CMD*: `rclone config create mlc-inference s3 provider=Cloudflare access_key_id=f65ba5eef400db161ea49967de89f47b secret_access_key=fbea333914c292b854f14d3fe232bad6c5407bf0ab1bebf78833c2b359bdfd2b endpoint=https://c2686074cb2caf5cbaf6d134bdba8b47.r2.cloudflarestorage.com` - - *CM_PACKAGE_URL*: `mlc-inference:mlcommons-inference-wg-public/model_weights` - - Workflow: 
- * `_pytorch,fp32,weight_sharded,wget` - - Environment variables: - - *CM_PACKAGE_URL*: `https://cloud.mlcommons.org/index.php/s/XzfSeLgW8FYfR3S/download` - - *CM_DAE_EXTRACT_DOWNLOADED*: `yes` - - *CM_DOWNLOAD_FILENAME*: `download` - - *CM_EXTRACT_UNZIP*: `yes` - - Workflow: - -
    - - - * Group "**download-tool**" -
    - Click here to expand this section. - - * `_rclone` - - Workflow: - * `_wget` - - Workflow: - -
    - - - * Group "**framework**" -
    - Click here to expand this section. - - * `_onnx` - - Environment variables: - - *CM_ML_MODEL_FRAMEWORK*: `onnx` - - Workflow: - * **`_pytorch`** (default) - - Environment variables: - - *CM_ML_MODEL_FRAMEWORK*: `pytorch` - - *CM_TMP_MODEL_ADDITIONAL_NAME*: `dlrm_terabyte.pytorch` - - Workflow: - -
    - - - * Group "**precision**" -
    - Click here to expand this section. - - * **`_fp32`** (default) - - Environment variables: - - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` - - *CM_ML_MODEL_PRECISION*: `fp32` - - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` - - Workflow: - -
    - - - * Group "**type**" -
    - Click here to expand this section. - - * **`_weight_sharded`** (default) - - Environment variables: - - *CM_DLRM_MULTIHOT_MODEL*: `yes` - - Workflow: - -
    - - -#### Default variations - -`_fp32,_pytorch,_weight_sharded` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--dir=value` → `CM_DOWNLOAD_PATH=value` -* `--download_path=value` → `CM_DOWNLOAD_PATH=value` -* `--to=value` → `CM_DOWNLOAD_PATH=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "dir":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-dlrm-terabyte/_cm.json) - 1. Run "preprocess" function from customize.py - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-dlrm-terabyte/_cm.json)*** - * download-and-extract - * CM names: `--adr.['dae']...` - - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-dlrm-terabyte/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-dlrm-terabyte/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-dlrm-terabyte/_cm.json) - -___ -### Script output -`cmr "get ml-model dlrm raw terabyte criteo-terabyte criteo recommendation [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_ML_MODEL_*` -#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-models/get-ml-model-efficientnet-lite/index.md b/docs/AI-ML-models/get-ml-model-efficientnet-lite/index.md deleted file mode 100644 index c81976666..000000000 --- a/docs/AI-ML-models/get-ml-model-efficientnet-lite/index.md +++ /dev/null @@ -1,248 +0,0 @@ -Automatically generated README for this automation recipe: **get-ml-model-efficientnet-lite** - -Category: **AI/ML models** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-efficientnet-lite,1041f681977d4b7c) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-efficientnet-lite)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,efficientnet,raw,ml-model-efficientnet,ml-model-efficientnet-lite,lite,tflite,image-classification* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get ml-model efficientnet raw ml-model-efficientnet ml-model-efficientnet-lite lite tflite image-classification" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,ml-model,efficientnet,raw,ml-model-efficientnet,ml-model-efficientnet-lite,lite,tflite,image-classification` - -`cm run script --tags=get,ml-model,efficientnet,raw,ml-model-efficientnet,ml-model-efficientnet-lite,lite,tflite,image-classification[,variations] ` - -*or* - -`cmr "get ml-model efficientnet raw ml-model-efficientnet ml-model-efficientnet-lite lite tflite image-classification"` - -`cmr "get ml-model efficientnet raw ml-model-efficientnet ml-model-efficientnet-lite lite tflite image-classification [variations]" ` - - -* *See the list of 
`variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,ml-model,efficientnet,raw,ml-model-efficientnet,ml-model-efficientnet-lite,lite,tflite,image-classification' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,ml-model,efficientnet,raw,ml-model-efficientnet,ml-model-efficientnet-lite,lite,tflite,image-classification"``` - -#### Run this script via Docker (beta) - -`cm docker script "get ml-model efficientnet raw ml-model-efficientnet ml-model-efficientnet-lite lite tflite image-classification[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_tflite` - - Workflow: - -
    - - - * Group "**kind**" -
    - Click here to expand this section. - - * **`_lite0`** (default) - - Environment variables: - - *CM_ML_MODEL_EFFICIENTNET_LITE_KIND*: `lite0` - - Workflow: - * `_lite1` - - Environment variables: - - *CM_ML_MODEL_EFFICIENTNET_LITE_KIND*: `lite1` - - Workflow: - * `_lite2` - - Environment variables: - - *CM_ML_MODEL_EFFICIENTNET_LITE_KIND*: `lite2` - - Workflow: - * `_lite3` - - Environment variables: - - *CM_ML_MODEL_EFFICIENTNET_LITE_KIND*: `lite3` - - Workflow: - * `_lite4` - - Environment variables: - - *CM_ML_MODEL_EFFICIENTNET_LITE_KIND*: `lite4` - - Workflow: - -
    - - - * Group "**precision**" -
    - Click here to expand this section. - - * **`_fp32`** (default) - - Environment variables: - - *CM_ML_MODEL_EFFICIENTNET_LITE_PRECISION*: `fp32` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` - - *CM_ML_MODEL_PRECISION*: `fp32` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` - - Workflow: - * `_uint8` - - Aliases: `_int8` - - Environment variables: - - *CM_ML_MODEL_EFFICIENTNET_LITE_PRECISION*: `int8` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `uint8` - - *CM_ML_MODEL_PRECISION*: `uint8` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `uint8` - - Workflow: - -
    - - - * Group "**resolution**" -
    - Click here to expand this section. - - * **`_resolution-224`** (default) - - Environment variables: - - *CM_ML_MODEL_IMAGE_HEIGHT*: `224` - - *CM_ML_MODEL_IMAGE_WIDTH*: `224` - - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `224` - - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.224` - - Workflow: - * `_resolution-240` - - Environment variables: - - *CM_ML_MODEL_IMAGE_HEIGHT*: `240` - - *CM_ML_MODEL_IMAGE_WIDTH*: `240` - - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `240` - - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.240` - - Workflow: - * `_resolution-260` - - Environment variables: - - *CM_ML_MODEL_IMAGE_HEIGHT*: `260` - - *CM_ML_MODEL_IMAGE_WIDTH*: `260` - - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `260` - - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.260` - - Workflow: - * `_resolution-280` - - Environment variables: - - *CM_ML_MODEL_IMAGE_HEIGHT*: `280` - - *CM_ML_MODEL_IMAGE_WIDTH*: `280` - - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `280` - - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.280` - - Workflow: - * `_resolution-300` - - Environment variables: - - *CM_ML_MODEL_IMAGE_HEIGHT*: `300` - - *CM_ML_MODEL_IMAGE_WIDTH*: `300` - - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `300` - - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.300` - - Workflow: - -
    - - -#### Default variations - -`_fp32,_lite0,_resolution-224` - -#### Valid variation combinations checked by the community - - - -* `_lite0,_resolution-224` -* `_lite1,_resolution-240` -* `_lite2,_resolution-260` -* `_lite3,_resolution-280` -* `_lite4,_resolution-300` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_ML_MODEL_INPUTS_DATA_TYPE: `fp32` -* CM_ML_MODEL_PRECISION: `fp32` -* CM_ML_MODEL_WEIGHTS_DATA_TYPE: `fp32` - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-efficientnet-lite/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-efficientnet-lite/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-efficientnet-lite/_cm.json) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-efficientnet-lite/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-efficientnet-lite/_cm.json) - -___ -### Script output -`cmr "get ml-model efficientnet raw ml-model-efficientnet ml-model-efficientnet-lite lite tflite image-classification [,variations]" -j` -#### New environment keys (filter) - -* `CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS` -* `CM_ML_MODEL_*` -#### New environment keys auto-detected from customize - -* `CM_ML_MODEL_FILE` -* `CM_ML_MODEL_FILE_WITH_PATH` -* `CM_ML_MODEL_PATH` -* `CM_ML_MODEL_STARTING_WEIGHTS_FILENAME` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-gptj/index.md b/docs/AI-ML-models/get-ml-model-gptj/index.md deleted file mode 100644 index 5231048a7..000000000 --- a/docs/AI-ML-models/get-ml-model-gptj/index.md +++ /dev/null @@ -1,321 +0,0 @@ -Automatically generated README for this automation recipe: **get-ml-model-gptj** - -Category: **AI/ML models** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-gptj,a41166210f294fbf) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,raw,ml-model,gptj,gpt-j,large-language-model* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get raw ml-model gptj gpt-j large-language-model" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,raw,ml-model,gptj,gpt-j,large-language-model` - -`cm run script --tags=get,raw,ml-model,gptj,gpt-j,large-language-model[,variations] [--input_flags]` - -*or* - -`cmr "get raw ml-model gptj gpt-j large-language-model"` - -`cmr "get raw ml-model gptj gpt-j large-language-model [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,raw,ml-model,gptj,gpt-j,large-language-model' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,raw,ml-model,gptj,gpt-j,large-language-model"``` - -#### Run this script via Docker (beta) - -`cm docker script "get raw ml-model gptj gpt-j large-language-model[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_batch_size.#` - - Environment variables: - - *CM_ML_MODEL_BATCH_SIZE*: `#` - - Workflow: - * `_pytorch,fp32` - - Environment variables: - - *CM_DOWNLOAD_EXTRA_OPTIONS*: ` --output-document checkpoint.zip` - - *CM_UNZIP*: `yes` - - *CM_DOWNLOAD_CHECKSUM_NOT_USED*: `e677e28aaf03da84584bb3073b7ee315` - - *CM_PACKAGE_URL*: `https://cloud.mlcommons.org/index.php/s/QAZ2oM94MkFtbQx/download` - - *CM_RCLONE_CONFIG_CMD*: `rclone config create mlc-inference s3 provider=Cloudflare access_key_id=f65ba5eef400db161ea49967de89f47b secret_access_key=fbea333914c292b854f14d3fe232bad6c5407bf0ab1bebf78833c2b359bdfd2b endpoint=https://c2686074cb2caf5cbaf6d134bdba8b47.r2.cloudflarestorage.com` - - *CM_RCLONE_URL*: `mlc-inference:mlcommons-inference-wg-public/gpt-j` - - Workflow: - * `_pytorch,fp32,wget` - - Workflow: - * `_pytorch,int4,intel` - - Workflow: - * `_pytorch,int8,intel` - - Workflow: - * `_pytorch,intel` - - Environment variables: - - *CM_GPTJ_INTEL_MODEL*: `yes` - - Workflow: - 1. 
***Read "deps" on other CM scripts*** - * get,mlperf,inference,results - - CM script: [get-mlperf-inference-results](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results) - - CM script: [get-mlperf-inference-results-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results-dir) - * get,ml-model,gpt-j,_fp32,_pytorch - - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) - * get,conda,_name.gptj-pt - - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) - * get,python,_conda.gptj-pt - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,generic,conda-package,_package.intel-openmp,_source.intel - * CM names: `--adr.['conda-package', 'intel-openmp']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.jemalloc,_source.conda-forge - * CM names: `--adr.['conda-package', 'jemalloc']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * install,ipex,from.src,_for-intel-mlperf-inference-v3.1-gptj - - CM script: [install-ipex-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-ipex-from-src) - * get,dataset,cnndm,_calibration - - CM script: [get-dataset-cnndm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-cnndm) - * `_saxml,fp32` - - Environment variables: - - *CM_TMP_MODEL_SAXML*: `fp32` - - Workflow: - 1. 
***Read "deps" on other CM scripts*** - * get,ml-model,gptj,_pytorch,_fp32 - - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,generic-python-lib,_package.jax[cpu] - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.paxml - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.praxis - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.transformers - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.accelerate - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_saxml,int8` - - Environment variables: - - *CM_TMP_MODEL_SAXML*: `int8` - - Workflow: - 1. 
***Read "deps" on other CM scripts*** - * get,ml-model,gptj,_saxml,_fp32 - - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,generic-python-lib,_package.praxis - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.apache-beam - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,git,repo,_repo.https://github.com/google/saxml - * CM names: `--adr.['saxml']...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - -
    - - - * Group "**download-tool**" -
    - Click here to expand this section. - - * **`_rclone`** (default) - - Environment variables: - - *CM_DOWNLOAD_FILENAME*: `checkpoint` - - *CM_DOWNLOAD_URL*: `<<>>` - - Workflow: - * `_wget` - - Environment variables: - - *CM_DOWNLOAD_URL*: `<<>>` - - *CM_DOWNLOAD_FILENAME*: `checkpoint.zip` - - Workflow: - -
    - - - * Group "**framework**" -
    - Click here to expand this section. - - * **`_pytorch`** (default) - - Environment variables: - - *CM_ML_MODEL_DATA_LAYOUT*: `NCHW` - - *CM_ML_MODEL_FRAMEWORK*: `pytorch` - - *CM_ML_STARTING_WEIGHTS_FILENAME*: `<<>>` - - Workflow: - * `_saxml` - - Workflow: - -
    - - - * Group "**model-provider**" -
    - Click here to expand this section. - - * `_intel` - - Workflow: - * **`_mlcommons`** (default) - - Workflow: - -
    - - - * Group "**precision**" -
    - Click here to expand this section. - - * `_fp32` - - Environment variables: - - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` - - *CM_ML_MODEL_PRECISION*: `fp32` - - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` - - Workflow: - * `_int4` - - Environment variables: - - *CM_ML_MODEL_INPUT_DATA_TYPES*: `int4` - - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `int4` - - Workflow: - * `_int8` - - Environment variables: - - *CM_ML_MODEL_INPUT_DATA_TYPES*: `int8` - - *CM_ML_MODEL_PRECISION*: `int8` - - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `int8` - - Workflow: - * `_uint8` - - Environment variables: - - *CM_ML_MODEL_INPUT_DATA_TYPES*: `uint8` - - *CM_ML_MODEL_PRECISION*: `uint8` - - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `uint8` - - Workflow: - -
    - - -#### Default variations - -`_mlcommons,_pytorch,_rclone` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--checkpoint=value` → `GPTJ_CHECKPOINT_PATH=value` -* `--download_path=value` → `CM_DOWNLOAD_PATH=value` -* `--to=value` → `CM_DOWNLOAD_PATH=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "checkpoint":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/_cm.json)*** - * download-and-extract - * Enable this dependency only if all ENV vars are set:
    -`{'CM_TMP_REQUIRE_DOWNLOAD': ['yes']}` - * CM names: `--adr.['dae']...` - - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) - 1. ***Run native script if exists*** - * [run-int4-calibration.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/run-int4-calibration.sh) - * [run-intel.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/run-intel.sh) - * [run-saxml-quantized.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/run-saxml-quantized.sh) - * [run-saxml.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/run-saxml.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-gptj/_cm.json) - -___ -### Script output -`cmr "get raw ml-model gptj gpt-j large-language-model [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_ML_MODEL_*` -* `GPTJ_CHECKPOINT_PATH` -#### New environment keys auto-detected from customize - -* `CM_ML_MODEL_FILE` -* `CM_ML_MODEL_FILE_WITH_PATH` -* `CM_ML_MODEL_WEIGHT_DATA_TYPES` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-huggingface-zoo/index.md b/docs/AI-ML-models/get-ml-model-huggingface-zoo/index.md deleted file mode 100644 index 7e5d18f56..000000000 --- a/docs/AI-ML-models/get-ml-model-huggingface-zoo/index.md +++ /dev/null @@ -1,192 +0,0 @@ -Automatically generated README for this automation recipe: **get-ml-model-huggingface-zoo** - -Category: **AI/ML models** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and 
Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-huggingface-zoo,53cf8252a443446a) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,huggingface,zoo* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get ml-model huggingface zoo" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,ml-model,huggingface,zoo` - -`cm run script --tags=get,ml-model,huggingface,zoo[,variations] [--input_flags]` - -*or* - -`cmr "get ml-model huggingface zoo"` - -`cmr "get ml-model huggingface zoo [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,ml-model,huggingface,zoo' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,ml-model,huggingface,zoo"``` - -#### Run this script via Docker (beta) - -`cm docker script "get ml-model huggingface zoo[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_model-stub.#` - - Environment variables: - - *CM_MODEL_ZOO_STUB*: `#` - - Workflow: - * `_onnx-subfolder` - - Environment variables: - - *CM_HF_SUBFOLDER*: `onnx` - - Workflow: - * `_pierreguillou_bert_base_cased_squad_v1.1_portuguese` - - Environment variables: - - *CM_MODEL_ZOO_STUB*: `pierreguillou/bert-base-cased-squad-v1.1-portuguese` - - Workflow: - * `_prune` - - Environment variables: - - *CM_MODEL_TASK*: `prune` - - Workflow: - -
    - - - * Group "**download-type**" -
    - Click here to expand this section. - - * `_clone-repo` - - Environment variables: - - *CM_GIT_CLONE_REPO*: `yes` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,git,repo,_lfs - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - -
    - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--download_path=value` → `CM_DOWNLOAD_PATH=value` -* `--env_key=value` → `CM_MODEL_ZOO_ENV_KEY=value` -* `--full_subfolder=value` → `CM_HF_FULL_SUBFOLDER=value` -* `--model_filename=value` → `CM_MODEL_ZOO_FILENAME=value` -* `--revision=value` → `CM_HF_REVISION=value` -* `--subfolder=value` → `CM_HF_SUBFOLDER=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "download_path":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo/_cm.json)*** - * get,python3 - * CM names: `--adr.['python3', 'python']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,generic-python-lib,_huggingface_hub - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-huggingface-zoo/_cm.json) - -___ -### Script output -`cmr "get ml-model huggingface zoo [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_ML_MODEL*` -* `CM_MODEL_ZOO_STUB` -#### New environment keys auto-detected from customize - -* `CM_ML_MODEL_'+env_key+'_FILE_WITH_PATH` -* `CM_ML_MODEL_'+env_key+'_PATH` -* `CM_ML_MODEL_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-llama2/index.md b/docs/AI-ML-models/get-ml-model-llama2/index.md deleted file mode 100644 index 75957bee9..000000000 --- a/docs/AI-ML-models/get-ml-model-llama2/index.md +++ /dev/null @@ -1,222 +0,0 @@ -Automatically generated README for this automation recipe: **get-ml-model-llama2** - -Category: **AI/ML models** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-llama2,5db97be9f61244c6) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-llama2)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,raw,ml-model,language-processing,llama2,llama2-70b,text-summarization* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get raw ml-model language-processing llama2 llama2-70b text-summarization" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,raw,ml-model,language-processing,llama2,llama2-70b,text-summarization` - -`cm run script --tags=get,raw,ml-model,language-processing,llama2,llama2-70b,text-summarization[,variations] [--input_flags]` - -*or* - -`cmr "get raw ml-model language-processing llama2 llama2-70b text-summarization"` - -`cmr "get raw ml-model language-processing llama2 llama2-70b text-summarization [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,raw,ml-model,language-processing,llama2,llama2-70b,text-summarization' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,raw,ml-model,language-processing,llama2,llama2-70b,text-summarization"``` - -#### Run this script via Docker (beta) - -`cm docker script "get raw ml-model language-processing llama2 llama2-70b text-summarization[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_batch_size.#` - - Environment variables: - - *CM_ML_MODEL_BATCH_SIZE*: `#` - - Workflow: - * `_pytorch,fp32` - - Workflow: - -
    - - - * Group "**framework**" -
    - Click here to expand this section. - - * **`_pytorch`** (default) - - Environment variables: - - *CM_ML_MODEL_FRAMEWORK*: `pytorch` - - Workflow: - -
    - - - * Group "**huggingface-stub**" -
    - Click here to expand this section. - - * **`_meta-llama/Llama-2-70b-chat-hf`** (default) - - Environment variables: - - *CM_GIT_CHECKOUT_FOLDER*: `Llama-2-70b-chat-hf` - - *CM_MODEL_ZOO_ENV_KEY*: `LLAMA2` - - Workflow: - * `_meta-llama/Llama-2-7b-chat-hf` - - Environment variables: - - *CM_GIT_CHECKOUT_FOLDER*: `Llama-2-7b-chat-hf` - - *CM_MODEL_ZOO_ENV_KEY*: `LLAMA2` - - Workflow: - * `_stub.#` - - Environment variables: - - *CM_MODEL_ZOO_ENV_KEY*: `LLAMA2` - - Workflow: - -
    - - - * Group "**precision**" -
    - Click here to expand this section. - - * **`_fp32`** (default) - - Environment variables: - - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` - - *CM_ML_MODEL_PRECISION*: `fp32` - - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` - - Workflow: - * `_int8` - - Environment variables: - - *CM_ML_MODEL_INPUT_DATA_TYPES*: `int8` - - *CM_ML_MODEL_PRECISION*: `int8` - - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `int8` - - Workflow: - * `_uint8` - - Environment variables: - - *CM_ML_MODEL_INPUT_DATA_TYPES*: `uint8` - - *CM_ML_MODEL_PRECISION*: `uint8` - - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `uint8` - - Workflow: - -
    - - -#### Default variations - -`_fp32,_meta-llama/Llama-2-70b-chat-hf,_pytorch` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--checkpoint=value` → `LLAMA2_CHECKPOINT_PATH=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "checkpoint":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-llama2/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-llama2/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-llama2/_cm.json)*** - * get,ml-model,huggingface,zoo,_clone-repo - * Enable this dependency only if all ENV vars are set:
    -`{'CM_TMP_REQUIRE_DOWNLOAD': ['yes']}` - * CM names: `--adr.['hf-zoo']...` - - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-llama2/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-llama2/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-llama2/_cm.json) - -___ -### Script output -`cmr "get raw ml-model language-processing llama2 llama2-70b text-summarization [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_ML_MODEL_*` -* `LLAMA2_CHECKPOINT_PATH` -#### New environment keys auto-detected from customize - -* `CM_ML_MODEL_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-mobilenet/index.md b/docs/AI-ML-models/get-ml-model-mobilenet/index.md deleted file mode 100644 index 94f71e697..000000000 --- a/docs/AI-ML-models/get-ml-model-mobilenet/index.md +++ /dev/null @@ -1,470 +0,0 @@ -Automatically generated README for this automation recipe: **get-ml-model-mobilenet** - -Category: **AI/ML models** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-mobilenet,ce46675a3ab249e4) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-mobilenet)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,mobilenet,raw,ml-model-mobilenet,image-classification* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get ml-model mobilenet raw ml-model-mobilenet image-classification" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,ml-model,mobilenet,raw,ml-model-mobilenet,image-classification` - -`cm run script --tags=get,ml-model,mobilenet,raw,ml-model-mobilenet,image-classification[,variations] ` - -*or* - -`cmr "get ml-model mobilenet raw ml-model-mobilenet image-classification"` - -`cmr "get ml-model mobilenet raw ml-model-mobilenet image-classification [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,ml-model,mobilenet,raw,ml-model-mobilenet,image-classification' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,ml-model,mobilenet,raw,ml-model-mobilenet,image-classification"``` - -#### Run this script via Docker (beta) - -`cm docker script "get ml-model mobilenet raw ml-model-mobilenet image-classification[variations]" ` - -___ -### Customization - - -#### Variations - - * *Internal group (variations should not be selected manually)* -
    - Click here to expand this section. - - * `_quantized_` - - Environment variables: - - *CM_ML_MODEL_MOBILENET_NAME_SUFFIX*: `_quant` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `yes` - - Workflow: - * `_tf,from.google,v2,quantized_` - - Environment variables: - - *CM_PACKAGE_URL*: `https://storage.googleapis.com/mobilenet_v2/checkpoints/<<>>_v2_<<>>_<<>>.tgz` - - *CM_ML_MODEL_WEIGHTS_FILE*: `<<>>_v2_<<>>_<<>>.ckpt.data-00000-of-00001` - - *CM_ML_MODEL_FILE*: `model.tflite` - - *CM_EXTRACT_FOLDER*: `v2_<<>>_<<>>` - - *CM_UNTAR*: `yes` - - Workflow: - -
    - - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_onnx,fp32,v1` - - Environment variables: - - *CM_ML_MODEL_NORMALIZE_DATA*: `yes` - - *CM_ML_MODEL_SUBTRACT_MEANS*: `no` - - *CM_ML_MODEL_VER*: `1_1.0_224` - - *CM_ML_MODEL_INPUT_LAYER_NAME*: `input:0` - - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `MobilenetV1/Predictions/Reshape_1:0` - - Workflow: - * `_onnx,int8,v1` - - Environment variables: - - *CM_ML_MODEL_NORMALIZE_DATA*: `no` - - *CM_ML_MODEL_SUBTRACT_MEANS*: `yes` - - *CM_ML_MODEL_GIVEN_CHANNEL_MEANS*: `128.0 128.0 128.0` - - *CM_ML_MODEL_VER*: `1_1.0_224_quant` - - *CM_ML_MODEL_INPUT_LAYER_NAME*: `0` - - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `169` - - *CM_PACKAGE_URL*: `https://zenodo.org/record/3353417/files/Quantized%20MobileNet.zip` - - *CM_ML_MODEL_FILE*: `mobilenet_sym_no_bn.onnx` - - *CM_UNZIP*: `yes` - - Workflow: - * `_onnx,opset-11,fp32,v1` - - Environment variables: - - *CM_PACKAGE_URL*: `https://zenodo.org/record/4735651/files/mobilenet_v1_1.0_224.onnx` - - Workflow: - * `_onnx,opset-8,fp32,v1` - - Environment variables: - - *CM_PACKAGE_URL*: `https://zenodo.org/record/3157894/files/mobilenet_v1_1.0_224.onnx` - - Workflow: - * `_tf,fp32,v1,resolution-224,multiplier-1.0` - - Environment variables: - - *CM_ML_MODEL_ACCURACY*: `71.676` - - Workflow: - * `_tf,from.google,v1` - - Environment variables: - - *CM_PACKAGE_URL*: `http://download.tensorflow.org/models/mobilenet_v1_2018_08_02/mobilenet_v1_<<>>_<<>><<>>.tgz` - - *CM_UNTAR*: `yes` - - Workflow: - * `_tf,from.google,v2,fp32` - - Environment variables: - - *CM_PACKAGE_URL*: `https://storage.googleapis.com/mobilenet_v2/checkpoints/mobilenet_v2_<<>>_<<>>.tgz` - - *CM_ML_MODEL_WEIGHTS_FILE*: `mobilenet_v2_<<>>_<<>>.ckpt.data-00000-of-00001` - - *CM_ML_MODEL_FILE*: `mobilenet_v2_<<>>_<<>>.tflite` - - *CM_UNTAR*: `yes` - - Workflow: - * `_tf,from.google,v3` - - Environment variables: - - *CM_PACKAGE_URL*: `https://storage.googleapis.com/mobilenet_v3/checkpoints/v3-<<>>_<<>>_<<>>_<<>>.tgz` - - *CM_EXTRACT_FOLDER*: 
`v3-<<>>_<<>>_<<>>_<<>>` - - *CM_ML_MODEL_FILE*: `v3-<<>>_<<>>_<<>>_<<>>.tflite` - - *CM_UNTAR*: `yes` - - Workflow: - * `_tf,from.zenodo,v1` - - Environment variables: - - *CM_PACKAGE_URL*: `https://zenodo.org/record/2269307/files/mobilenet_v1_<<>>_<<>><<>>.tgz` - - *CM_UNTAR*: `yes` - - Workflow: - * `_tf,int8,v1,resolution-224,multiplier-1.0` - - Environment variables: - - *CM_ML_MODEL_ACCURACY*: `70.762` - - Workflow: - * `_tf,v1` - - Environment variables: - - *CM_ML_MODEL_VER*: `1_<<>>_<<>><<>>_2018_08_02` - - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `MobilenetV1/Predictions/Reshape_1` - - *CM_ML_MODEL_WEIGHTS_FILE*: `mobilenet_v1_<<>>_<<>><<>>.ckpt.data-00000-of-00001` - - *CM_ML_MODEL_FILE*: `mobilenet_v1_<<>>_<<>><<>>.tflite` - - Workflow: - * `_tf,v1,fp32` - - Environment variables: - - *CM_ML_MODEL_MOBILENET_NAME_PREFIX*: `` - - Workflow: - * `_tf,v1,int8` - - Environment variables: - - *CM_ML_MODEL_MOBILENET_NAME_SUFFIX*: `_quant` - - Workflow: - * `_tf,v1,uint8` - - Environment variables: - - *CM_ML_MODEL_MOBILENET_NAME_SUFFIX*: `_quant` - - Workflow: - * `_tf,v2,fp32` - - Environment variables: - - *CM_ML_MODEL_MOBILENET_NAME_PREFIX*: `` - - *CM_ML_MODEL_VER*: `2_<<>>_<<>>` - - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `MobilenetV2/Predictions/Reshape_1` - - Workflow: - * `_tf,v2,int8` - - Environment variables: - - *CM_ML_MODEL_MOBILENET_NAME_PREFIX*: `quantized` - - *CM_ML_MODEL_VER*: `2_<<>>_<<>>` - - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `MobilenetV2/Predictions/Softmax` - - Workflow: - * `_tf,v2,uint8` - - Environment variables: - - *CM_ML_MODEL_MOBILENET_NAME_PREFIX*: `quantized` - - *CM_ML_MODEL_VER*: `2_<<>>_<<>>` - - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `MobilenetV2/Predictions/Softmax` - - Workflow: - * `_tf,v3` - - Environment variables: - - *CM_ML_MODEL_VER*: `3_<<>>_<<>>` - - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `MobilenetV3/Predictions/Softmax` - - Workflow: - * `_tflite` - - Workflow: - -
    - - - * Group "**framework**" -
    - Click here to expand this section. - - * `_onnx` - - Environment variables: - - *CM_ML_MODEL_DATA_LAYOUT*: `NCHW` - - *CM_ML_MODEL_FRAMEWORK*: `onnx` - - Workflow: - * **`_tf`** (default) - - Environment variables: - - *CM_ML_MODEL_DATA_LAYOUT*: `NHWC` - - *CM_ML_MODEL_NORMALIZE_DATA*: `yes` - - *CM_ML_MODEL_SUBTRACT_MEANS*: `no` - - *CM_ML_MODEL_INPUT_LAYER_NAME*: `input` - - Workflow: - -
    - - - * Group "**kind**" -
    - Click here to expand this section. - - * `_large` - - Environment variables: - - *CM_ML_MODEL_MOBILENET_KIND*: `large` - - Workflow: - * `_large-minimalistic` - - Environment variables: - - *CM_ML_MODEL_MOBILENET_KIND*: `large-minimalistic` - - Workflow: - * `_small` - - Environment variables: - - *CM_ML_MODEL_MOBILENET_KIND*: `small` - - Workflow: - * `_small-minimalistic` - - Environment variables: - - *CM_ML_MODEL_MOBILENET_KIND*: `small-minimalistic` - - Workflow: - -
    - - - * Group "**multiplier**" -
    - Click here to expand this section. - - * `_multiplier-0.25` - - Environment variables: - - *CM_ML_MODEL_MOBILENET_MULTIPLIER*: `0.25` - - *CM_ML_MODEL_MOBILENET_MULTIPLIER_PERCENTAGE*: `25` - - Workflow: - * `_multiplier-0.35` - - Environment variables: - - *CM_ML_MODEL_MOBILENET_MULTIPLIER*: `0.35` - - *CM_ML_MODEL_MOBILENET_MULTIPLIER_PERCENTAGE*: `35` - - Workflow: - * `_multiplier-0.5` - - Environment variables: - - *CM_ML_MODEL_MOBILENET_MULTIPLIER*: `0.5` - - *CM_ML_MODEL_MOBILENET_MULTIPLIER_PERCENTAGE*: `50` - - Workflow: - * `_multiplier-0.75` - - Environment variables: - - *CM_ML_MODEL_MOBILENET_MULTIPLIER*: `0.75` - - *CM_ML_MODEL_MOBILENET_MULTIPLIER_PERCENTAGE*: `75` - - Workflow: - * `_multiplier-1.0` - - Environment variables: - - *CM_ML_MODEL_MOBILENET_MULTIPLIER*: `1.0` - - *CM_ML_MODEL_MOBILENET_MULTIPLIER_PERCENTAGE*: `100` - - Workflow: - -
    - - - * Group "**opset-version**" -
    - Click here to expand this section. - - * `_opset-11` - - Environment variables: - - *CM_ML_MODEL_ONNX_OPSET*: `11` - - Workflow: - * `_opset-8` - - Environment variables: - - *CM_ML_MODEL_ONNX_OPSET*: `8` - - Workflow: - -
    - - - * Group "**precision**" -
    - Click here to expand this section. - - * **`_fp32`** (default) - - Environment variables: - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` - - *CM_ML_MODEL_PRECISION*: `fp32` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` - - *CM_ML_MODEL_MOBILENET_PRECISION*: `float` - - Workflow: - * `_int8` - - Environment variables: - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8` - - *CM_ML_MODEL_PRECISION*: `int8` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` - - *CM_ML_MODEL_MOBILENET_PRECISION*: `int8` - - Workflow: - * `_uint8` - - Environment variables: - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `uint8` - - *CM_ML_MODEL_PRECISION*: `uint8` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `uint8` - - *CM_ML_MODEL_MOBILENET_PRECISION*: `uint8` - - Workflow: - -
    - - - * Group "**resolution**" -
    - Click here to expand this section. - - * `_resolution-128` - - Environment variables: - - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `128` - - *CM_ML_MODEL_IMAGE_HEIGHT*: `128` - - *CM_ML_MODEL_IMAGE_WIDTH*: `128` - - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.128` - - Workflow: - * `_resolution-160` - - Environment variables: - - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `160` - - *CM_ML_MODEL_IMAGE_HEIGHT*: `160` - - *CM_ML_MODEL_IMAGE_WIDTH*: `160` - - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.160` - - Workflow: - * `_resolution-192` - - Environment variables: - - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `192` - - *CM_ML_MODEL_IMAGE_HEIGHT*: `192` - - *CM_ML_MODEL_IMAGE_WIDTH*: `192` - - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.192` - - Workflow: - * `_resolution-224` - - Environment variables: - - *CM_ML_MODEL_MOBILENET_RESOLUTION*: `224` - - *CM_ML_MODEL_IMAGE_HEIGHT*: `224` - - *CM_ML_MODEL_IMAGE_WIDTH*: `224` - - *CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS*: `_resolution.224` - - Workflow: - -
    - - - * Group "**source**" -
    - Click here to expand this section. - - * `_from.google` - - Environment variables: - - *CM_DOWNLOAD_SOURCE*: `google` - - Workflow: - * `_from.zenodo` - - Environment variables: - - *CM_DOWNLOAD_SOURCE*: `zenodo` - - Workflow: - -
    - - - * Group "**version**" -
    - Click here to expand this section. - - * `_v1` - - Environment variables: - - *CM_ML_MODEL_MOBILENET_VERSION*: `1` - - *CM_ML_MODEL_FULL_NAME*: `mobilenet-v1-precision_<<>>-<<>>-<<>>` - - Workflow: - * `_v2` - - Environment variables: - - *CM_ML_MODEL_MOBILENET_VERSION*: `2` - - *CM_ML_MODEL_VER*: `2` - - *CM_ML_MODEL_FULL_NAME*: `mobilenet-v2-precision_<<>>-<<>>-<<>>` - - Workflow: - * **`_v3`** (default) - - Environment variables: - - *CM_ML_MODEL_MOBILENET_VERSION*: `3` - - *CM_ML_MODEL_VER*: `3` - - *CM_ML_MODEL_FULL_NAME*: `mobilenet-v3-precision_<<>>-<<>>-<<>>` - - Workflow: - -
    - - -#### Default variations - -`_fp32,_tf,_v3` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_ML_MODEL: `mobilenet` -* CM_ML_MODEL_DATASET: `imagenet2012-val` -* CM_ML_MODEL_RETRAINING: `no` -* CM_ML_MODEL_WEIGHT_TRANSFORMATIONS: `no` -* CM_ML_MODEL_INPUTS_DATA_TYPE: `fp32` -* CM_ML_MODEL_WEIGHTS_DATA_TYPE: `fp32` -* CM_ML_MODEL_MOBILENET_NAME_SUFFIX: `` - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-mobilenet/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-mobilenet/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-mobilenet/_cm.json) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-mobilenet/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-mobilenet/_cm.json) - -___ -### Script output -`cmr "get ml-model mobilenet raw ml-model-mobilenet image-classification [,variations]" -j` -#### New environment keys (filter) - -* `CM_DATASET_PREPROCESSED_IMAGENET_DEP_TAGS` -* `CM_ML_MODEL_*` -#### New environment keys auto-detected from customize - -* `CM_ML_MODEL_FILE` -* `CM_ML_MODEL_FILE_WITH_PATH` -* `CM_ML_MODEL_PATH` -* `CM_ML_MODEL_STARTING_WEIGHTS_FILENAME` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-neuralmagic-zoo/index.md b/docs/AI-ML-models/get-ml-model-neuralmagic-zoo/index.md deleted file mode 100644 index 90b5c4731..000000000 --- a/docs/AI-ML-models/get-ml-model-neuralmagic-zoo/index.md +++ /dev/null @@ -1,335 +0,0 @@ -Automatically generated README for this automation recipe: **get-ml-model-neuralmagic-zoo** - -Category: **AI/ML models** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-neuralmagic-zoo,adbb3f2525a14f97) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,model,zoo,deepsparse,model-zoo,sparse-zoo,neuralmagic,neural-magic* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get ml-model model zoo deepsparse model-zoo sparse-zoo neuralmagic neural-magic" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,ml-model,model,zoo,deepsparse,model-zoo,sparse-zoo,neuralmagic,neural-magic` - -`cm run script --tags=get,ml-model,model,zoo,deepsparse,model-zoo,sparse-zoo,neuralmagic,neural-magic[,variations] ` - -*or* - -`cmr "get ml-model model zoo deepsparse model-zoo sparse-zoo neuralmagic neural-magic"` - -`cmr "get ml-model model zoo deepsparse model-zoo sparse-zoo neuralmagic neural-magic [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,ml-model,model,zoo,deepsparse,model-zoo,sparse-zoo,neuralmagic,neural-magic' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,ml-model,model,zoo,deepsparse,model-zoo,sparse-zoo,neuralmagic,neural-magic"``` - -#### Run this script via Docker (beta) - -`cm docker script "get ml-model model zoo deepsparse model-zoo sparse-zoo neuralmagic neural-magic[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_bert-base-pruned90-none` - - Aliases: `_model-stub.zoo:nlp/question_answering/bert-base/pytorch/huggingface/squad/pruned90-none` - - Environment variables: - - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/bert-base/pytorch/huggingface/squad/pruned90-none` - - *CM_ML_MODEL_FULL_NAME*: `bert-base-pruned90-none-bert-99` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-base-uncased` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` - - *CM_ML_MODEL_RETRAINING*: `no` - - Workflow: - * `_bert-base-pruned95_obs_quant-none` - - Aliases: `_model-stub.zoo:nlp/question_answering/bert-base/pytorch/huggingface/squad/pruned95_obs_quant-none` - - Environment variables: - - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/bert-base/pytorch/huggingface/squad/pruned95_obs_quant-none` - - *CM_ML_MODEL_FULL_NAME*: `bert-base-pruned95_obs_quant-none-bert-99` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-base-uncased` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, unstructured pruning` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int64` - - *CM_ML_MODEL_RETRAINING*: `yes` - - Workflow: - * `_bert-base_cased-pruned90-none` - - Aliases: `_model-stub.zoo:nlp/question_answering/bert-base_cased/pytorch/huggingface/squad/pruned90-none` - - Environment variables: - - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/bert-base_cased/pytorch/huggingface/squad/pruned90-none` - - *CM_ML_MODEL_FULL_NAME*: `bert-base_cased-pruned90-none-bert-99` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-base-cased` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` - - *CM_ML_MODEL_RETRAINING*: `no` - - Workflow: - * 
`_bert-large-base-none` - - Aliases: `_model-stub.zoo:nlp/question_answering/bert-large/pytorch/huggingface/squad/base-none` - - Environment variables: - - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/bert-large/pytorch/huggingface/squad/base-none` - - *CM_ML_MODEL_FULL_NAME*: `bert-large-base-none-bert-99` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-large-uncased` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` - - *CM_ML_MODEL_RETRAINING*: `no` - - Workflow: - * `_bert-large-pruned80_quant-none-vnni` - - Aliases: `_model-stub.zoo:nlp/question_answering/bert-large/pytorch/huggingface/squad/pruned80_quant-none-vnni` - - Environment variables: - - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/bert-large/pytorch/huggingface/squad/pruned80_quant-none-vnni` - - *CM_ML_MODEL_FULL_NAME*: `bert-large-pruned80_quant-none-vnni-bert-99` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-large-uncased` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, unstructured pruning` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int64` - - *CM_ML_MODEL_RETRAINING*: `no` - - Workflow: - * `_mobilebert-14layer_pruned50-none-vnni` - - Aliases: `_model-stub.zoo:nlp/question_answering/mobilebert-none/pytorch/huggingface/squad/14layer_pruned50-none-vnni` - - Environment variables: - - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/mobilebert-none/pytorch/huggingface/squad/14layer_pruned50-none-vnni` - - *CM_ML_MODEL_FULL_NAME*: `mobilebert-14layer_pruned50-none-vnni-bert-99` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://storage.googleapis.com/cloud-tpu-checkpoints/mobilebert/uncased_L-24_H-128_B-512_A-4_F-4_OPT.tar.gz` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` - - 
*CM_ML_MODEL_RETRAINING*: `no` - - Workflow: - * `_mobilebert-14layer_pruned50_quant-none-vnni` - - Aliases: `_model-stub.zoo:nlp/question_answering/mobilebert-none/pytorch/huggingface/squad/14layer_pruned50_quant-none-vnni` - - Environment variables: - - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/mobilebert-none/pytorch/huggingface/squad/14layer_pruned50_quant-none-vnni` - - *CM_ML_MODEL_FULL_NAME*: `mobilebert-14layer_pruned50_quant-none-vnni-bert-99` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://storage.googleapis.com/cloud-tpu-checkpoints/mobilebert/uncased_L-24_H-128_B-512_A-4_F-4_OPT.tar.gz` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, unstructured pruning` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int64` - - *CM_ML_MODEL_RETRAINING*: `yes` - - Workflow: - * `_mobilebert-base_quant-none` - - Aliases: `_model-stub.zoo:nlp/question_answering/mobilebert-none/pytorch/huggingface/squad/base_quant-none` - - Environment variables: - - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/mobilebert-none/pytorch/huggingface/squad/base_quant-none` - - *CM_ML_MODEL_FULL_NAME*: `mobilebert-base_quant-none-bert-99` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://storage.googleapis.com/cloud-tpu-checkpoints/mobilebert/uncased_L-24_H-128_B-512_A-4_F-4_OPT.tar.gz` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, unstructured pruning` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int64` - - *CM_ML_MODEL_RETRAINING*: `yes` - - Workflow: - * `_mobilebert-none-base-none` - - Aliases: `_model-stub.zoo:nlp/question_answering/mobilebert-none/pytorch/huggingface/squad/base-none` - - Environment variables: - - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/mobilebert-none/pytorch/huggingface/squad/base-none` - - *CM_ML_MODEL_FULL_NAME*: `mobilebert-none-base-none-bert-99` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: 
`https://storage.googleapis.com/cloud-tpu-checkpoints/mobilebert/uncased_L-24_H-128_B-512_A-4_F-4_OPT.tar.gz` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` - - *CM_ML_MODEL_RETRAINING*: `no` - - Workflow: - * `_model-stub.#` - - Environment variables: - - *CM_MODEL_ZOO_STUB*: `#` - - Workflow: - * `_obert-base-pruned90-none` - - Aliases: `_model-stub.zoo:nlp/question_answering/obert-base/pytorch/huggingface/squad/pruned90-none` - - Environment variables: - - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/obert-base/pytorch/huggingface/squad/pruned90-none` - - *CM_ML_MODEL_FULL_NAME*: `obert-base-pruned90-none-bert-99` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-large-uncased` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` - - *CM_ML_MODEL_RETRAINING*: `no` - - Workflow: - * `_obert-large-base-none` - - Aliases: `_model-stub.zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/base-none` - - Environment variables: - - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/base-none` - - *CM_ML_MODEL_FULL_NAME*: `obert-large-base-none-bert-99` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-large-uncased` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` - - *CM_ML_MODEL_RETRAINING*: `no` - - Workflow: - * `_obert-large-pruned95-none-vnni` - - Aliases: `_model-stub.zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/pruned95-none-vnni` - - Environment variables: - - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/pruned95-none-vnni` - - *CM_ML_MODEL_FULL_NAME*: `obert-large-pruned95-none-vnni-bert-99` - - 
*CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-large-uncased` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` - - *CM_ML_MODEL_RETRAINING*: `no` - - Workflow: - * `_obert-large-pruned95_quant-none-vnni` - - Aliases: `_model-stub.zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/pruned95_quant-none-vnni` - - Environment variables: - - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/pruned95_quant-none-vnni` - - *CM_ML_MODEL_FULL_NAME*: `obert-large-pruned95_quant-none-vnni-bert-99` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-large-uncased` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, unstructured pruning` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int64` - - *CM_ML_MODEL_RETRAINING*: `yes` - - Workflow: - * `_obert-large-pruned97-none` - - Aliases: `_model-stub.zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/pruned97-none` - - Environment variables: - - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/pruned97-none` - - *CM_ML_MODEL_FULL_NAME*: `obert-large-pruned97-none-bert-99` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-large-uncased` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `unstructured pruning` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp32` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` - - *CM_ML_MODEL_RETRAINING*: `no` - - Workflow: - * `_obert-large-pruned97-quant-none` - - Aliases: `_model-stub.zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/pruned97_quant-none` - - Environment variables: - - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/obert-large/pytorch/huggingface/squad/pruned97_quant-none` - - *CM_ML_MODEL_FULL_NAME*: `obert-large-pruned97-quant-none-bert-99` - - 
*CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/bert-large-uncased` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, unstructured pruning` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int64` - - *CM_ML_MODEL_RETRAINING*: `no` - - Workflow: - * `_oberta-base-pruned90-quant-none` - - Aliases: `_model-stub.zoo:nlp/question_answering/oberta-base/pytorch/huggingface/squad/pruned90_quant-none` - - Environment variables: - - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/oberta-base/pytorch/huggingface/squad/pruned90_quant-none` - - *CM_ML_MODEL_FULL_NAME*: `oberta-base-pruned90-quant-none-bert-99` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/roberta-base` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, unstructured pruning` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int64` - - *CM_ML_MODEL_RETRAINING*: `no` - - Workflow: - * `_roberta-base-pruned85-quant-none` - - Aliases: `_model-stub.zoo:nlp/question_answering/roberta-base/pytorch/huggingface/squad/pruned85_quant-none` - - Environment variables: - - *CM_MODEL_ZOO_STUB*: `zoo:nlp/question_answering/roberta-base/pytorch/huggingface/squad/pruned85_quant-none` - - *CM_ML_MODEL_FULL_NAME*: `roberta-base-pruned85-quant-none-bert-99` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/roberta-base` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, unstructured pruning` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int64` - - *CM_ML_MODEL_RETRAINING*: `no` - - Workflow: - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo/_cm.json)*** - * get,python3 - * CM names: `--adr.['python3', 'python']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,generic-python-lib,_package.protobuf - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_sparsezoo - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-neuralmagic-zoo/_cm.json) - -___ -### Script output -`cmr "get ml-model model zoo deepsparse model-zoo sparse-zoo neuralmagic neural-magic [,variations]" -j` -#### New environment keys (filter) - -* `CM_GET_DEPENDENT_CACHED_PATH` -* `CM_MLPERF_CUSTOM_MODEL_PATH` -* `CM_ML_MODEL*` -* `CM_MODEL_ZOO_STUB` -#### New environment keys auto-detected from customize - -* `CM_GET_DEPENDENT_CACHED_PATH` -* `CM_MLPERF_CUSTOM_MODEL_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-resnet50/index.md b/docs/AI-ML-models/get-ml-model-resnet50/index.md deleted file mode 100644 index ff2c976a8..000000000 --- a/docs/AI-ML-models/get-ml-model-resnet50/index.md +++ /dev/null @@ -1,356 +0,0 @@ -Automatically generated README for this automation recipe: **get-ml-model-resnet50** - -Category: **AI/ML models** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-resnet50,56203e4e998b4bc0) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-resnet50)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,raw,ml-model,resnet50,ml-model-resnet50,image-classification* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get raw ml-model resnet50 ml-model-resnet50 image-classification" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,raw,ml-model,resnet50,ml-model-resnet50,image-classification` - -`cm run script --tags=get,raw,ml-model,resnet50,ml-model-resnet50,image-classification[,variations] ` - -*or* - -`cmr "get raw ml-model resnet50 ml-model-resnet50 image-classification"` - -`cmr "get raw ml-model resnet50 ml-model-resnet50 image-classification [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,raw,ml-model,resnet50,ml-model-resnet50,image-classification' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,raw,ml-model,resnet50,ml-model-resnet50,image-classification"``` - -#### Run this script via Docker (beta) - -`cm docker script "get raw ml-model resnet50 ml-model-resnet50 image-classification[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_batch_size.#` - - Environment variables: - - *CM_ML_MODEL_BATCH_SIZE*: `#` - - Workflow: - * `_batch_size.1` - - Environment variables: - - *CM_ML_MODEL_BATCH_SIZE*: `1` - - Workflow: - * `_fix-input-shape` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * `_from-tf` - - Workflow: - * `_huggingface_default` - - Environment variables: - - *CM_PACKAGE_URL*: `https://huggingface.co/ctuning/mlperf-inference-resnet50-onnx-fp32-imagenet2012-v1.0/resolve/main/resnet50_v1.onnx` - - Workflow: - * `_ncnn,fp32` - - Environment variables: - - *CM_PACKAGE_URL*: `https://zenodo.org/record/8073420/files/resnet50_v1.bin?download=1` - - Workflow: - 1. ***Read "post_deps" on other CM scripts*** - * download-and-extract,_url.https://zenodo.org/record/8073420/files/resnet50_v1.param?download= - - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) - * `_onnx,from-tf` - - Environment variables: - - *CM_ML_MODEL_DATA_LAYOUT*: `NHWC` - - *CM_ML_MODEL_FRAMEWORK*: `onnx` - - *CM_ML_MODEL_INPUT_LAYERS*: `input_tensor` - - *CM_ML_MODEL_INPUT_LAYER_NAME*: `input_tensor` - - *CM_ML_MODEL_INPUT_SHAPES*: `\"input_tensor\": (BATCH_SIZE, 224, 224, 3)` - - *CM_ML_MODEL_OUTPUT_LAYERS*: `softmax_tensor` - - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `softmax_tensor` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/2535873/files/resnet50_v1.pb` - - Workflow: - * `_onnx,from-tf,fp32` - - Environment variables: - - *CM_DOWNLOAD_FILENAME*: `resnet50_v1_modified.onnx` - - *CM_PACKAGE_URL*: `https://drive.google.com/uc?id=15wZ_8Vt12cb10IEBsln8wksD1zGwlbOM` - - Workflow: - * `_onnx,opset-11` - - Environment variables: - - *CM_PACKAGE_URL*: `https://zenodo.org/record/4735647/files/resnet50_v1.onnx` - - Workflow: - * 
`_onnx,opset-8` - - Environment variables: - - *CM_PACKAGE_URL*: `https://zenodo.org/record/2592612/files/resnet50_v1.onnx` - - Workflow: - * `_pytorch,fp32` - - Environment variables: - - *CM_PACKAGE_URL*: `https://zenodo.org/record/4588417/files/resnet50-19c8e357.pth` - - Workflow: - * `_pytorch,int8` - - Environment variables: - - *CM_PACKAGE_URL*: `https://zenodo.org/record/4589637/files/resnet50_INT8bit_quantized.pt` - - Workflow: - * `_tensorflow,fix-input-shape` - - Environment variables: - - *CM_ML_MODEL_TF_FIX_INPUT_SHAPE*: `yes` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_package.tensorflow - * CM names: `--adr.['tensorflow']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_tflite,argmax` - - Environment variables: - - *CM_DAE_EXTRACT_DOWNLOADED*: `yes` - - *CM_DOWNLOAD_FINAL_ENV_NAME*: `` - - *CM_EXTRACT_FINAL_ENV_NAME*: `CM_ML_MODEL_FILE_WITH_PATH` - - *CM_ML_MODEL_FILE*: `resnet50_v1.tflite` - - *CM_ML_MODEL_INPUT_SHAPES*: `\"input_tensor 2\": (BATCH_SIZE, 224, 224, 3)` - - *CM_PACKAGE_URL*: `https://www.dropbox.com/s/cvv2zlfo80h54uz/resnet50_v1.tflite.gz?dl=1` - - Workflow: - * `_tflite,int8,no-argmax` - - Environment variables: - - *CM_DOWNLOAD_FINAL_ENV_NAME*: `CM_ML_MODEL_FILE_WITH_PATH` - - *CM_ML_MODEL_FILE*: `resnet50_quant_full_mlperf_edgetpu.tflite` - - *CM_ML_MODEL_INPUT_SHAPES*: `\"input_tensor 2\": (BATCH_SIZE, 224, 224, 3)` - - *CM_PACKAGE_URL*: `https://zenodo.org/record/8234946/files/resnet50_quant_full_mlperf_edgetpu.tflite?download=1` - - Workflow: - * `_tflite,no-argmax` - - Environment variables: - - *CM_ML_MODEL_FILE*: `resnet50_v1.no-argmax.tflite` - - *CM_ML_MODEL_INPUT_SHAPES*: `\"input_tensor 2\": (BATCH_SIZE, 224, 224, 3)` - - *CM_PACKAGE_URL*: `https://www.dropbox.com/s/vhuqo0wc39lky0a/resnet50_v1.no-argmax.tflite?dl=1` - - Workflow: - -
    - - - * Group "**framework**" -
    - Click here to expand this section. - - * `_ncnn` - - Environment variables: - - *CM_ML_MODEL_FRAMEWORK*: `ncnn` - - Workflow: - * **`_onnx`** (default) - - Aliases: `_onnxruntime` - - Environment variables: - - *CM_ML_MODEL_DATA_LAYOUT*: `NCHW` - - *CM_ML_MODEL_FRAMEWORK*: `onnx` - - *CM_ML_MODEL_INPUT_LAYERS*: `input_tensor:0` - - *CM_ML_MODEL_INPUT_LAYER_NAME*: `input_tensor:0` - - *CM_ML_MODEL_INPUT_SHAPES*: `\"input_tensor:0\": (BATCH_SIZE, 3, 224, 224)` - - *CM_ML_MODEL_OUTPUT_LAYERS*: `softmax_tensor:0` - - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `softmax_tensor:0` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `<<>>` - - *CM_ML_MODEL_VER*: `1.5` - - Workflow: - * `_pytorch` - - Environment variables: - - *CM_ML_MODEL_DATA_LAYOUT*: `NCHW` - - *CM_ML_MODEL_FRAMEWORK*: `pytorch` - - *CM_ML_MODEL_GIVEN_CHANNEL_MEANS*: `?` - - *CM_ML_MODEL_INPUT_LAYER_NAME*: `input_tensor:0` - - *CM_ML_MODEL_INPUT_SHAPES*: `\"input_tensor:0\": [BATCH_SIZE, 3, 224, 224]` - - *CM_ML_MODEL_OUTPUT_LAYERS*: `output` - - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `?` - - *CM_ML_STARTING_WEIGHTS_FILENAME*: `<<>>` - - Workflow: - * `_tensorflow` - - Aliases: `_tf` - - Environment variables: - - *CM_ML_MODEL_ACCURACY*: `76.456` - - *CM_ML_MODEL_DATA_LAYOUT*: `NHWC` - - *CM_ML_MODEL_FRAMEWORK*: `tensorflow` - - *CM_ML_MODEL_GIVEN_CHANNEL_MEANS*: `123.68 116.78 103.94` - - *CM_ML_MODEL_INPUT_LAYERS*: `input_tensor` - - *CM_ML_MODEL_INPUT_LAYER_NAME*: `input_tensor` - - *CM_ML_MODEL_INPUT_SHAPES*: `\"input_tensor:0\": (BATCH_SIZE, 3, 224, 224)` - - *CM_ML_MODEL_NORMALIZE_DATA*: `0` - - *CM_ML_MODEL_OUTPUT_LAYERS*: `softmax_tensor` - - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `softmax_tensor` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `<<>>` - - *CM_ML_MODEL_SUBTRACT_MEANS*: `YES` - - *CM_PACKAGE_URL*: `https://zenodo.org/record/2535873/files/resnet50_v1.pb` - - Workflow: - * `_tflite` - - Environment variables: - - *CM_ML_MODEL_ACCURACY*: `76.456` - - *CM_ML_MODEL_DATA_LAYOUT*: `NHWC` - - 
*CM_ML_MODEL_FRAMEWORK*: `tflite` - - *CM_ML_MODEL_GIVEN_CHANNEL_MEANS*: `123.68 116.78 103.94` - - *CM_ML_MODEL_INPUT_LAYERS*: `input_tensor` - - *CM_ML_MODEL_INPUT_LAYER_NAME*: `input_tensor` - - *CM_ML_MODEL_INPUT_SHAPES*: `\"input_tensor 2\": (BATCH_SIZE, 224, 224, 3)` - - *CM_ML_MODEL_NORMALIZE_DATA*: `0` - - *CM_ML_MODEL_OUTPUT_LAYERS*: `softmax_tensor` - - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `softmax_tensor` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `<<>>` - - *CM_ML_MODEL_SUBTRACT_MEANS*: `YES` - - Workflow: - -
    - - - * Group "**model-output**" -
    - Click here to expand this section. - - * **`_argmax`** (default) - - Environment variables: - - *CM_ML_MODEL_OUTPUT_LAYER_ARGMAX*: `yes` - - Workflow: - * `_no-argmax` - - Environment variables: - - *CM_ML_MODEL_OUTPUT_LAYER_ARGMAX*: `no` - - Workflow: - -
    - - - * Group "**opset-version**" -
    - Click here to expand this section. - - * `_opset-11` - - Environment variables: - - *CM_ML_MODEL_ONNX_OPSET*: `11` - - Workflow: - * `_opset-8` - - Environment variables: - - *CM_ML_MODEL_ONNX_OPSET*: `8` - - Workflow: - -
    - - - * Group "**precision**" -
    - Click here to expand this section. - - * **`_fp32`** (default) - - Environment variables: - - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` - - *CM_ML_MODEL_PRECISION*: `fp32` - - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` - - Workflow: - * `_int8` - - Environment variables: - - *CM_ML_MODEL_INPUT_DATA_TYPES*: `int8` - - *CM_ML_MODEL_PRECISION*: `int8` - - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `int8` - - Workflow: - * `_uint8` - - Environment variables: - - *CM_ML_MODEL_INPUT_DATA_TYPES*: `uint8` - - *CM_ML_MODEL_PRECISION*: `uint8` - - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `uint8` - - Workflow: - -
    - - -#### Default variations - -`_argmax,_fp32,_onnx` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-resnet50/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-resnet50/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-resnet50/_cm.json)*** - * download-and-extract - * CM names: `--adr.['model-downloader']...` - - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) - 1. ***Run native script if exists*** - * [run-fix-input.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-resnet50/run-fix-input.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-resnet50/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-resnet50/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-resnet50/_cm.json) - -___ -### Script output -`cmr "get raw ml-model resnet50 ml-model-resnet50 image-classification [,variations]" -j` -#### New environment keys (filter) - -* `CM_ML_MODEL_*` -#### New environment keys auto-detected from customize - -* `CM_ML_MODEL_FILE` -* `CM_ML_MODEL_FILE_WITH_PATH` -* `CM_ML_MODEL_STARTING_FILE_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-retinanet-nvidia/index.md b/docs/AI-ML-models/get-ml-model-retinanet-nvidia/index.md deleted file mode 100644 index 5fba8e668..000000000 --- a/docs/AI-ML-models/get-ml-model-retinanet-nvidia/index.md +++ /dev/null @@ -1,172 +0,0 @@ -Automatically generated README for this automation recipe: **get-ml-model-retinanet-nvidia** - -Category: **AI/ML models** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-retinanet-nvidia,f059d249fac843ba) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet-nvidia)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,nvidia-retinanet,nvidia* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get ml-model nvidia-retinanet nvidia" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,ml-model,nvidia-retinanet,nvidia` - -`cm run script --tags=get,ml-model,nvidia-retinanet,nvidia[,variations] ` - -*or* - -`cmr "get ml-model nvidia-retinanet nvidia"` - -`cmr "get ml-model nvidia-retinanet nvidia [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,ml-model,nvidia-retinanet,nvidia' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,ml-model,nvidia-retinanet,nvidia"``` - -#### Run this script via Docker (beta) - -`cm docker script "get ml-model nvidia-retinanet nvidia[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_efficient-nms` - - Environment variables: - - *CM_NVIDIA_EFFICIENT_NMS*: `yes` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_polygraphy - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_TORCH_DEVICE: `cpu` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet-nvidia/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,mlperf,training,src,_nvidia-retinanet - - CM script: [get-mlperf-training-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-src) - * get,mlperf,inference,src - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * get,ml-model,retinanet,_pytorch,_fp32,_weights - - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) - * get,generic-python-lib,_torch - * Enable this dependency only if all ENV vars are set:
    -`{'CM_TORCH_DEVICE': 'cpu'}` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torchvision - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_mlperf_logging - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,cuda - * Enable this dependency only if all ENV vars are set:
    -`{'CM_TORCH_DEVICE': 'cuda'}` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * get,generic-python-lib,_torch_cuda - * Enable this dependency only if all ENV vars are set:
    -`{'CM_TORCH_DEVICE': 'cuda'}` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,nvidia,mlperf,inference,common-code,-_custom - - CM script: [get-mlperf-inference-nvidia-common-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-common-code) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet-nvidia/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet-nvidia/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet-nvidia/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet-nvidia/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet-nvidia/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet-nvidia/_cm.json) - -___ -### Script output -`cmr "get ml-model nvidia-retinanet nvidia [,variations]" -j` -#### New environment keys (filter) - -* `CM_ML_MODEL_*` -* `CM_NVIDIA_RETINANET_*` -#### New environment keys auto-detected from customize - -* `CM_ML_MODEL_ANCHOR_PATH` -* `CM_ML_MODEL_DYN_BATCHSIZE_PATH` -* `CM_NVIDIA_RETINANET_EFFICIENT_NMS_CONCAT_MODEL_WITH_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-retinanet/index.md b/docs/AI-ML-models/get-ml-model-retinanet/index.md deleted file mode 100644 index 4df53983b..000000000 --- a/docs/AI-ML-models/get-ml-model-retinanet/index.md +++ /dev/null @@ -1,225 +0,0 @@ -Automatically generated README for this automation recipe: **get-ml-model-retinanet** - -Category: **AI/ML models** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-retinanet,427bc5665e4541c2) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,raw,resnext50,retinanet,object-detection* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get ml-model raw resnext50 retinanet object-detection" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,ml-model,raw,resnext50,retinanet,object-detection` - -`cm run script --tags=get,ml-model,raw,resnext50,retinanet,object-detection[,variations] ` - -*or* - -`cmr "get ml-model raw resnext50 retinanet object-detection"` - -`cmr "get ml-model raw resnext50 retinanet object-detection [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,ml-model,raw,resnext50,retinanet,object-detection' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,ml-model,raw,resnext50,retinanet,object-detection"``` - -#### Run this script via Docker (beta) - -`cm docker script "get ml-model raw resnext50 retinanet object-detection[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_no-nms` - - Environment variables: - - *CM_TMP_ML_MODEL_RETINANET_NO_NMS*: `yes` - - *CM_ML_MODEL_RETINANET_NO_NMS*: `yes` - - *CM_QAIC_PRINT_NODE_PRECISION_INFO*: `yes` - - Workflow: - * `_onnx,fp32` - - Environment variables: - - *CM_PACKAGE_URL*: `https://zenodo.org/record/6617879/files/resnext50_32x4d_fpn.onnx` - - *CM_DOWNLOAD_CHECKSUM*: `4544f4e56e0a4684215831cc937ea45c` - - *CM_ML_MODEL_ACCURACY*: `0.3757` - - Workflow: - * `_onnx,no-nms` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,python3 - * CM names: `--adr.['python, python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,generic-python-lib,_package.onnx - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.onnxsim - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * download,file,_url.https://raw.githubusercontent.com/arjunsuresh/ck-qaic/main/package/model-onnx-mlperf-retinanet-no-nms/remove-nms-and-extract-priors.patch - - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) - * get,git,repo,_repo.https://github.com/mlcommons/training.git,_patch - * CM names: `--adr.['mlperf-training-src']...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - * get,ml-model,retinanet,_pytorch,_fp32,_weights - * CM names: `--adr.['pytorch-weights']...` - - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) - * get,generic-python-lib,_package.torch - * CM names: `--adr.['torch', 'pytorch']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_pytorch,fp32` - - Environment 
variables: - - *CM_PACKAGE_URL*: `https://zenodo.org/record/6617981/files/resnext50_32x4d_fpn.pth` - - *CM_ML_MODEL_ACCURACY*: `0.3755` - - Workflow: - * `_pytorch,fp32,weights` - - Environment variables: - - *CM_PACKAGE_URL*: `https://zenodo.org/record/6605272/files/retinanet_model_10.zip?download=1` - - *CM_UNZIP*: `yes` - - *CM_ML_MODEL_FILE*: `retinanet_model_10.pth` - - *CM_ML_MODEL_ACCURACY*: `0.3755` - - Workflow: - * `_weights` - - Environment variables: - - *CM_MODEL_WEIGHTS_FILE*: `yes` - - Workflow: - -
    - - - * Group "**framework**" -
    - Click here to expand this section. - - * **`_onnx`** (default) - - Environment variables: - - *CM_ML_MODEL_DATA_LAYOUT*: `NCHW` - - *CM_ML_MODEL_FRAMEWORK*: `onnx` - - Workflow: - * `_pytorch` - - Environment variables: - - *CM_ML_MODEL_DATA_LAYOUT*: `NCHW` - - *CM_ML_MODEL_FRAMEWORK*: `pytorch` - - Workflow: - -
    - - - * Group "**precision**" -
    - Click here to expand this section. - - * **`_fp32`** (default) - - Environment variables: - - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` - - *CM_ML_MODEL_PRECISION*: `fp32` - - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` - - Workflow: - -
    - - -#### Default variations - -`_fp32,_onnx` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet/_cm.json)*** - * download-and-extract - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_TMP_ML_MODEL_RETINANET_NO_NMS': ['yes']}` - * CM names: `--adr.['dae']...` - - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) - 1. ***Run native script if exists*** - * [run-no-nms.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet/run-no-nms.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-retinanet/_cm.json) - -___ -### Script output -`cmr "get ml-model raw resnext50 retinanet object-detection [,variations]" -j` -#### New environment keys (filter) - -* `<<>>` -* `CM_ML_MODEL_*` -#### New environment keys auto-detected from customize - -* `CM_ML_MODEL_FILE` -* `CM_ML_MODEL_FILE_WITH_PATH` -* `CM_ML_MODEL_RETINANET_QAIC_NODE_PRECISION_INFO_FILE_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-rnnt/index.md b/docs/AI-ML-models/get-ml-model-rnnt/index.md deleted file mode 100644 index 1d81ace52..000000000 --- a/docs/AI-ML-models/get-ml-model-rnnt/index.md +++ /dev/null @@ -1,192 +0,0 @@ -Automatically generated README for this automation recipe: **get-ml-model-rnnt** - -Category: **AI/ML models** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-rnnt,8858f18b89774d28) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-rnnt)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,rnnt,raw,librispeech,speech-recognition* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get ml-model rnnt raw librispeech speech-recognition" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,ml-model,rnnt,raw,librispeech,speech-recognition` - -`cm run script --tags=get,ml-model,rnnt,raw,librispeech,speech-recognition[,variations] ` - -*or* - -`cmr "get ml-model rnnt raw librispeech speech-recognition"` - -`cmr "get ml-model rnnt raw librispeech speech-recognition [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,ml-model,rnnt,raw,librispeech,speech-recognition' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,ml-model,rnnt,raw,librispeech,speech-recognition"``` - -#### Run this script via Docker (beta) - -`cm docker script "get ml-model rnnt raw librispeech speech-recognition[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_pytorch,fp32` - - Environment variables: - - *CM_ML_MODEL_ACCURACY*: `0.07452253714852645` - - *CM_PACKAGE_URL*: `https://zenodo.org/record/3662521/files/DistributedDataParallel_1576581068.9962234-epoch-100.pt?download=1` - - Workflow: - * `_pytorch,fp32,amazon-s3` - - Environment variables: - - *CM_PACKAGE_URL*: `https://mlperf-public.s3.us-west-2.amazonaws.com/DistributedDataParallel_1576581068.9962234-epoch-100.pt` - - Workflow: - * `_pytorch,fp32,zenodo` - - Environment variables: - - *CM_PACKAGE_URL*: `https://zenodo.org/record/3662521/files/DistributedDataParallel_1576581068.9962234-epoch-100.pt?download=1` - - Workflow: - * `_weights` - - Environment variables: - - *CM_MODEL_WEIGHTS_FILE*: `yes` - - Workflow: - -
    - - - * Group "**download-src**" -
    - Click here to expand this section. - - * **`_amazon-s3`** (default) - - Workflow: - * `_zenodo` - - Workflow: - -
    - - - * Group "**framework**" -
    - Click here to expand this section. - - * **`_pytorch`** (default) - - Environment variables: - - *CM_ML_MODEL_FRAMEWORK*: `pytorch` - - Workflow: - -
    - - - * Group "**precision**" -
    - Click here to expand this section. - - * **`_fp32`** (default) - - Environment variables: - - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` - - *CM_ML_MODEL_PRECISION*: `fp32` - - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` - - Workflow: - -
    - - -#### Default variations - -`_amazon-s3,_fp32,_pytorch` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-rnnt/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-rnnt/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-rnnt/_cm.json) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-rnnt/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-rnnt/_cm.json) - -___ -### Script output -`cmr "get ml-model rnnt raw librispeech speech-recognition [,variations]" -j` -#### New environment keys (filter) - -* `CM_ML_MODEL_*` -#### New environment keys auto-detected from customize - -* `CM_ML_MODEL_FILE` -* `CM_ML_MODEL_FILE_WITH_PATH` -* `CM_ML_MODEL_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-stable-diffusion/index.md b/docs/AI-ML-models/get-ml-model-stable-diffusion/index.md deleted file mode 100644 index bf1378d78..000000000 --- a/docs/AI-ML-models/get-ml-model-stable-diffusion/index.md +++ /dev/null @@ -1,256 +0,0 @@ -Automatically generated README for this automation recipe: **get-ml-model-stable-diffusion** - -Category: **AI/ML models** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-stable-diffusion,22c6516b2d4d4c23) ]* - ---- -#### Summary - -* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-stable-diffusion)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,raw,ml-model,stable-diffusion,sdxl,text-to-image* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get raw ml-model stable-diffusion sdxl text-to-image" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,raw,ml-model,stable-diffusion,sdxl,text-to-image` - -`cm run script --tags=get,raw,ml-model,stable-diffusion,sdxl,text-to-image[,variations] [--input_flags]` - -*or* - -`cmr "get raw ml-model stable-diffusion sdxl text-to-image"` - -`cmr "get raw ml-model stable-diffusion sdxl text-to-image [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,raw,ml-model,stable-diffusion,sdxl,text-to-image' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,raw,ml-model,stable-diffusion,sdxl,text-to-image"``` - -#### Run this script via Docker (beta) - -`cm docker script "get raw ml-model stable-diffusion sdxl text-to-image[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_batch_size.#` - - Environment variables: - - *CM_ML_MODEL_BATCH_SIZE*: `#` - - Workflow: - * `_pytorch,fp16` - - Workflow: - * `_pytorch,fp32` - - Environment variables: - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0` - - Workflow: - * `_rclone,fp16` - - Environment variables: - - *CM_DOWNLOAD_URL*: `mlc-inference:mlcommons-inference-wg-public/stable_diffusion_fp16` - - Workflow: - * `_rclone,fp32` - - Environment variables: - - *CM_DOWNLOAD_URL*: `mlc-inference:mlcommons-inference-wg-public/stable_diffusion_fp32` - - Workflow: - -
    - - - * Group "**download-source**" -
    - Click here to expand this section. - - * `_huggingface` - - Workflow: - * **`_mlcommons`** (default) - - Workflow: - -
    - - - * Group "**download-tool**" -
    - Click here to expand this section. - - * `_git` - - Environment variables: - - *CM_DOWNLOAD_TOOL*: `git` - - Workflow: - * `_rclone` - - Environment variables: - - *CM_RCLONE_CONFIG_CMD*: `rclone config create mlc-inference s3 provider=Cloudflare access_key_id=f65ba5eef400db161ea49967de89f47b secret_access_key=fbea333914c292b854f14d3fe232bad6c5407bf0ab1bebf78833c2b359bdfd2b endpoint=https://c2686074cb2caf5cbaf6d134bdba8b47.r2.cloudflarestorage.com` - - *CM_DOWNLOAD_TOOL*: `rclone` - - Workflow: - * `_wget` - - Environment variables: - - *CM_DOWNLOAD_TOOL*: `wget` - - Workflow: - -
    - - - * Group "**framework**" -
    - Click here to expand this section. - - * **`_pytorch`** (default) - - Environment variables: - - *CM_ML_MODEL_FRAMEWORK*: `pytorch` - - Workflow: - -
    - - - * Group "**precision**" -
    - Click here to expand this section. - - * `_fp16` - - Environment variables: - - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp16` - - *CM_ML_MODEL_PRECISION*: `fp16` - - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp16` - - Workflow: - * **`_fp32`** (default) - - Environment variables: - - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` - - *CM_ML_MODEL_PRECISION*: `fp32` - - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` - - Workflow: - * `_int8` - - Environment variables: - - *CM_ML_MODEL_INPUT_DATA_TYPES*: `int8` - - *CM_ML_MODEL_PRECISION*: `int8` - - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `int8` - - Workflow: - * `_uint8` - - Environment variables: - - *CM_ML_MODEL_INPUT_DATA_TYPES*: `uint8` - - *CM_ML_MODEL_PRECISION*: `uint8` - - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `uint8` - - Workflow: - -
    - - -#### Default variations - -`_fp32,_mlcommons,_pytorch` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--checkpoint=value` → `SDXL_CHECKPOINT_PATH=value` -* `--download_path=value` → `CM_DOWNLOAD_PATH=value` -* `--to=value` → `CM_DOWNLOAD_PATH=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "checkpoint":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-stable-diffusion/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-stable-diffusion/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-stable-diffusion/_cm.json)*** - * get,ml-model,huggingface,zoo,_clone-repo,_model-stub.stabilityai/stable-diffusion-xl-base-1.0 - * Enable this dependency only if all ENV vars are set:
    -`{'CM_TMP_REQUIRE_DOWNLOAD': ['yes'], 'CM_DOWNLOAD_TOOL': ['git']}` - * CM names: `--adr.['hf-zoo']...` - - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo) - * download-and-extract - * Enable this dependency only if all ENV vars are set:
    -`{'CM_TMP_REQUIRE_DOWNLOAD': ['yes'], 'CM_DOWNLOAD_TOOL': ['rclone']}` - * CM names: `--adr.['dae']...` - - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-stable-diffusion/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-stable-diffusion/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-stable-diffusion/_cm.json) - -___ -### Script output -`cmr "get raw ml-model stable-diffusion sdxl text-to-image [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_ML_MODEL_*` -* `SDXL_CHECKPOINT_PATH` -#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-models/get-ml-model-tiny-resnet/index.md b/docs/AI-ML-models/get-ml-model-tiny-resnet/index.md deleted file mode 100644 index b58796435..000000000 --- a/docs/AI-ML-models/get-ml-model-tiny-resnet/index.md +++ /dev/null @@ -1,213 +0,0 @@ -Automatically generated README for this automation recipe: **get-ml-model-tiny-resnet** - -Category: **AI/ML models** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-tiny-resnet,dd5ec11c3f6e49eb) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-tiny-resnet)* -* CM meta description for this script: 
*[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,raw,ml-model,resnet,pretrained,tiny,model,ic,ml-model-tiny-resnet,image-classification* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get raw ml-model resnet pretrained tiny model ic ml-model-tiny-resnet image-classification" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,raw,ml-model,resnet,pretrained,tiny,model,ic,ml-model-tiny-resnet,image-classification` - -`cm run script --tags=get,raw,ml-model,resnet,pretrained,tiny,model,ic,ml-model-tiny-resnet,image-classification[,variations] ` - -*or* - -`cmr "get raw ml-model resnet pretrained tiny model ic ml-model-tiny-resnet image-classification"` - -`cmr "get raw ml-model resnet pretrained tiny model ic ml-model-tiny-resnet image-classification [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,raw,ml-model,resnet,pretrained,tiny,model,ic,ml-model-tiny-resnet,image-classification' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,raw,ml-model,resnet,pretrained,tiny,model,ic,ml-model-tiny-resnet,image-classification"``` - -#### Run this script via Docker (beta) - -`cm docker script "get raw ml-model resnet pretrained tiny model ic ml-model-tiny-resnet image-classification[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_batch_size.#` - - Environment variables: - - *CM_ML_MODEL_BATCH_SIZE*: `#` - - Workflow: - * `_tflite,int8` - - Environment variables: - - *CM_PACKAGE_URL*: `https://github.com/mlcommons/tiny/raw/master/benchmark/training/image_classification/trained_models/pretrainedResnet_quant.tflite` - - *CM_DOWNLOAD_CHECKSUM*: `2d6dd48722471313e4c4528249205ae3` - - Workflow: - -
    - - - * Group "**framework**" -
    - Click here to expand this section. - - * `_onnx` - - Environment variables: - - *CM_TMP_ML_MODEL_TF2ONNX*: `yes` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,python3 - * CM names: `--adr.['python,python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,tiny,model,resnet,_tflite - * CM names: `--adr.['tflite-resnet-model', 'dependent-model']...` - - CM script: [get-ml-model-tiny-resnet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-tiny-resnet) - * get,generic-python-lib,_package.tf2onnx - * CM names: `--adr.['tf2onnx']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * **`_tflite`** (default) - - Environment variables: - - *CM_ML_MODEL_ACCURACY*: `85` - - *CM_ML_MODEL_DATA_LAYOUT*: `NHWC` - - *CM_ML_MODEL_FRAMEWORK*: `tflite` - - *CM_ML_MODEL_GIVEN_CHANNEL_MEANS*: `` - - *CM_ML_MODEL_INPUT_LAYERS*: `` - - *CM_ML_MODEL_INPUT_LAYER_NAME*: `` - - *CM_ML_MODEL_INPUT_SHAPES*: `` - - *CM_ML_MODEL_NORMALIZE_DATA*: `0` - - *CM_ML_MODEL_OUTPUT_LAYERS*: `` - - *CM_ML_MODEL_OUTPUT_LAYER_NAME*: `` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `<<>>` - - *CM_ML_MODEL_SUBTRACT_MEANS*: `YES` - - Workflow: - -
    - - - * Group "**precision**" -
    - Click here to expand this section. - - * `_fp32` - - Environment variables: - - *CM_ML_MODEL_INPUT_DATA_TYPES*: `fp32` - - *CM_ML_MODEL_PRECISION*: `fp32` - - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `fp32` - - Workflow: - * **`_int8`** (default) - - Environment variables: - - *CM_ML_MODEL_INPUT_DATA_TYPES*: `int8` - - *CM_ML_MODEL_PRECISION*: `int8` - - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `int8` - - Workflow: - * `_uint8` - - Environment variables: - - *CM_ML_MODEL_INPUT_DATA_TYPES*: `uint8` - - *CM_ML_MODEL_PRECISION*: `uint8` - - *CM_ML_MODEL_WEIGHT_DATA_TYPES*: `uint8` - - Workflow: - -
    - - -#### Default variations - -`_int8,_tflite` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-tiny-resnet/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-tiny-resnet/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-tiny-resnet/_cm.json)*** - * download-and-extract - * Enable this dependency only if all ENV vars are set:
    -`{'CM_PACKAGE_URL': ['on']}` - - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-tiny-resnet/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-tiny-resnet/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-tiny-resnet/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-tiny-resnet/_cm.json) - -___ -### Script output -`cmr "get raw ml-model resnet pretrained tiny model ic ml-model-tiny-resnet image-classification [,variations]" -j` -#### New environment keys (filter) - -* `CM_ML_MODEL_*` -#### New environment keys auto-detected from customize - -* `CM_ML_MODEL_FILE` -* `CM_ML_MODEL_FILE_WITH_PATH` \ No newline at end of file diff --git a/docs/AI-ML-models/get-ml-model-using-imagenet-from-model-zoo/index.md b/docs/AI-ML-models/get-ml-model-using-imagenet-from-model-zoo/index.md deleted file mode 100644 index 9d979bdb8..000000000 --- a/docs/AI-ML-models/get-ml-model-using-imagenet-from-model-zoo/index.md +++ /dev/null @@ -1,147 +0,0 @@ -Automatically generated README for this automation recipe: **get-ml-model-using-imagenet-from-model-zoo** - -Category: **AI/ML models** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-using-imagenet-from-model-zoo,153e08828c4e45cc) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* 
GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-using-imagenet-from-model-zoo)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,model-zoo,zoo,imagenet,image-classification* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get ml-model model-zoo zoo imagenet image-classification" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,ml-model,model-zoo,zoo,imagenet,image-classification` - -`cm run script --tags=get,ml-model,model-zoo,zoo,imagenet,image-classification[,variations] ` - -*or* - -`cmr "get ml-model model-zoo zoo imagenet image-classification"` - -`cmr "get ml-model model-zoo zoo imagenet image-classification [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,ml-model,model-zoo,zoo,imagenet,image-classification' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,ml-model,model-zoo,zoo,imagenet,image-classification"``` - -#### Run this script via Docker (beta) - -`cm docker script "get ml-model model-zoo zoo imagenet image-classification[variations]" ` - -___ -### Customization - - -#### Variations - - * Group "**model-source**" -
    - Click here to expand this section. - - * `_model.#` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,ml-model,zoo,deepsparse,_model-stub.# - * CM names: `--adr.['neural-magic-zoo-downloader']...` - - CM script: [get-ml-model-neuralmagic-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-neuralmagic-zoo) - * `_model.resnet101-pytorch-base` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,ml-model,zoo,deepsparse,_model-stub.zoo:cv/classification/resnet_v1-101/pytorch/sparseml/imagenet/base-none - * CM names: `--adr.['neural-magic-zoo-downloader']...` - - CM script: [get-ml-model-neuralmagic-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-neuralmagic-zoo) - * `_model.resnet50-pruned95-uniform-quant` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,ml-model,zoo,deepsparse,_model-stub.zoo:cv/classification/resnet_v1-50/pytorch/sparseml/imagenet/pruned95_uniform_quant-none - * CM names: `--adr.['neural-magic-zoo-downloader']...` - - CM script: [get-ml-model-neuralmagic-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-neuralmagic-zoo) - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-using-imagenet-from-model-zoo/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-using-imagenet-from-model-zoo/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-using-imagenet-from-model-zoo/_cm.json) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-using-imagenet-from-model-zoo/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-using-imagenet-from-model-zoo/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ml-model-using-imagenet-from-model-zoo/_cm.json) - -___ -### Script output -`cmr "get ml-model model-zoo zoo imagenet image-classification [,variations]" -j` -#### New environment keys (filter) - -* `CM_ML_MODEL*` -#### New environment keys auto-detected from customize diff --git a/docs/AI-ML-models/get-tvm-model/index.md b/docs/AI-ML-models/get-tvm-model/index.md deleted file mode 100644 index 61775fb9f..000000000 --- a/docs/AI-ML-models/get-tvm-model/index.md +++ /dev/null @@ -1,288 +0,0 @@ -Automatically generated README for this automation recipe: **get-tvm-model** - -Category: **AI/ML models** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-tvm-model,c1b7b656b6224307) ] [ [Notes from the authors, contributors and 
users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm-model)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,ml-model-tvm,tvm-model* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get ml-model-tvm tvm-model" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,ml-model-tvm,tvm-model` - -`cm run script --tags=get,ml-model-tvm,tvm-model[,variations] ` - -*or* - -`cmr "get ml-model-tvm tvm-model"` - -`cmr "get ml-model-tvm tvm-model [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,ml-model-tvm,tvm-model' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,ml-model-tvm,tvm-model"``` - -#### Run this script via Docker (beta) - -`cm docker script "get ml-model-tvm tvm-model[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_tune-model` - - Environment variables: - - *CM_TUNE_TVM_MODEL*: `yes` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_xgboost - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_pandas - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_tornado - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - -
    - - - * Group "**batchsize**" -
    - Click here to expand this section. - - * `_batch_size.#` - - Environment variables: - - *CM_ML_MODEL_MAX_BATCH_SIZE*: `#` - - Workflow: - -
    - - - * Group "**frontend**" -
    - Click here to expand this section. - - * **`_onnx`** (default) - - Environment variables: - - *CM_TVM_FRONTEND_FRAMEWORK*: `onnx` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_onnx - * CM names: `--adr.['onnx']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_pytorch` - - Aliases: `_torch` - - Environment variables: - - *CM_TVM_FRONTEND_FRAMEWORK*: `pytorch` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_torch - * CM names: `--adr.['pytorch', 'torch']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torchvision - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_tensorflow` - - Aliases: `_tf` - - Environment variables: - - *CM_TVM_FRONTEND_FRAMEWORK*: `tensorflow` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_tensorflow - * CM names: `--adr.['tensorflow']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_tflite` - - Environment variables: - - *CM_TVM_FRONTEND_FRAMEWORK*: `tflite` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_tflite - * CM names: `--adr.['tflite']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - -
    - - - * Group "**model**" -
    - Click here to expand this section. - - * `_model.#` - - Environment variables: - - *CM_ML_MODEL*: `#` - - Workflow: - -
    - - - * Group "**precision**" -
    - Click here to expand this section. - - * **`_fp32`** (default) - - Workflow: - * `_int8` - - Workflow: - * `_uint8` - - Workflow: - -
    - - - * Group "**runtime**" -
    - Click here to expand this section. - - * `_graph_executor` - - Environment variables: - - *CM_TVM_USE_VM*: `no` - - Workflow: - * **`_virtual_machine`** (default) - - Environment variables: - - *CM_TVM_USE_VM*: `yes` - - Workflow: - -
    - - -#### Default variations - -`_fp32,_onnx,_virtual_machine` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_ML_MODEL_MAX_BATCH_SIZE: `1` -* CM_TUNE_TVM_MODEL: `no` -* CM_TVM_USE_VM: `yes` -* CM_TVM_FRONTEND_FRAMEWORK: `onnx` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm-model/_cm.json)*** - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,tvm - * CM names: `--adr.['tvm']...` - - CM script: [get-tvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm) - * get,generic-python-lib,_decorator - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_psutil - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_scipy - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_attrs - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm-model/customize.py)*** - 1. 
***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm-model/_cm.json)*** - * get,ml-model,raw - * CM names: `--adr.['original-model']...` - - CM script: [get-ml-model-3d-unet-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-3d-unet-kits19) - - CM script: [get-ml-model-bert-base-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-base-squad) - - CM script: [get-ml-model-bert-large-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-large-squad) - - CM script: [get-ml-model-dlrm-terabyte](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-dlrm-terabyte) - - CM script: [get-ml-model-efficientnet-lite](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-efficientnet-lite) - - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) - - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo) - - CM script: [get-ml-model-llama2](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-llama2) - - CM script: [get-ml-model-mobilenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-mobilenet) - - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) - - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) - - CM script: [get-ml-model-rnnt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-rnnt) - - CM script: [get-ml-model-stable-diffusion](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-stable-diffusion) - - CM script: [get-ml-model-tiny-resnet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-tiny-resnet) - 1. 
***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm-model/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm-model/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm-model/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tvm-model/_cm.json) - -___ -### Script output -`cmr "get ml-model-tvm tvm-model [,variations]" -j` -#### New environment keys (filter) - -* `CM_ML_MODEL_*` -* `CM_TUNE_TVM_*` -* `CM_TVM_*` -#### New environment keys auto-detected from customize - -* `CM_ML_MODEL_FILE` -* `CM_ML_MODEL_FILE_WITH_PATH` -* `CM_ML_MODEL_FRAMEWORK` -* `CM_ML_MODEL_INPUT_SHAPES` -* `CM_ML_MODEL_ORIGINAL_FILE_WITH_PATH` -* `CM_ML_MODEL_PATH` -* `CM_TUNE_TVM_MODEL` -* `CM_TVM_FRONTEND_FRAMEWORK` \ No newline at end of file diff --git a/docs/AI-ML-models/index.md b/docs/AI-ML-models/index.md new file mode 100644 index 000000000..a613e538a --- /dev/null +++ b/docs/AI-ML-models/index.md @@ -0,0 +1,23 @@ +The AI/ML models category contains the following scripts: + +- [convert-ml-model-huggingface-to-onnx](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/convert-ml-model-huggingface-to-onnx/README.md) +- [get-bert-squad-vocab](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-bert-squad-vocab/README.md) +- [get-dlrm](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-dlrm/README.md) +- [get-ml-model-3d-unet-kits19](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-ml-model-3d-unet-kits19/README.md) +- [get-ml-model-bert-base-squad](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-ml-model-bert-base-squad/README.md) +- 
[get-ml-model-bert-large-squad](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-ml-model-bert-large-squad/README.md) +- [get-ml-model-dlrm-terabyte](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-ml-model-dlrm-terabyte/README.md) +- [get-ml-model-efficientnet-lite](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-ml-model-efficientnet-lite/README.md) +- [get-ml-model-gptj](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-ml-model-gptj/README.md) +- [get-ml-model-huggingface-zoo](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-ml-model-huggingface-zoo/README.md) +- [get-ml-model-llama2](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-ml-model-llama2/README.md) +- [get-ml-model-mobilenet](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-ml-model-mobilenet/README.md) +- [get-ml-model-neuralmagic-zoo](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-ml-model-neuralmagic-zoo/README.md) +- [get-ml-model-resnet50](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-ml-model-resnet50/README.md) +- [get-ml-model-retinanet](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-ml-model-retinanet/README.md) +- [get-ml-model-retinanet-nvidia](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-ml-model-retinanet-nvidia/README.md) +- [get-ml-model-rnnt](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-ml-model-rnnt/README.md) +- [get-ml-model-stable-diffusion](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-ml-model-stable-diffusion/README.md) +- [get-ml-model-tiny-resnet](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-ml-model-tiny-resnet/README.md) +- 
[get-ml-model-using-imagenet-from-model-zoo](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-ml-model-using-imagenet-from-model-zoo/README.md) +- [get-tvm-model](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-tvm-model/README.md) diff --git a/docs/AI-ML-optimization/calibrate-model-for.qaic/index.md b/docs/AI-ML-optimization/calibrate-model-for.qaic/index.md deleted file mode 100644 index 9441e4566..000000000 --- a/docs/AI-ML-optimization/calibrate-model-for.qaic/index.md +++ /dev/null @@ -1,289 +0,0 @@ -Automatically generated README for this automation recipe: **calibrate-model-for.qaic** - -Category: **AI/ML optimization** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=calibrate-model-for.qaic,817bad70df2f4e45) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/calibrate-model-for.qaic)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *qaic,calibrate,profile,qaic-profile,qaic-calibrate* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "qaic calibrate profile qaic-profile qaic-calibrate" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=qaic,calibrate,profile,qaic-profile,qaic-calibrate` - -`cm run script --tags=qaic,calibrate,profile,qaic-profile,qaic-calibrate[,variations] ` - -*or* - -`cmr "qaic calibrate profile qaic-profile qaic-calibrate"` - -`cmr "qaic calibrate profile qaic-profile qaic-calibrate [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'qaic,calibrate,profile,qaic-profile,qaic-calibrate' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="qaic,calibrate,profile,qaic-profile,qaic-calibrate"``` - -#### Run this script via Docker (beta) - -`cm docker script "qaic calibrate profile qaic-profile qaic-calibrate[variations]" ` - -___ -### Customization - - -#### Variations - - * *Internal group (variations should not be selected manually)* -
    - Click here to expand this section. - - * `_bert_` - - Environment variables: - - *CM_QAIC_MODEL_NAME*: `bert-large` - - *CM_CREATE_INPUT_BATCH*: `no` - - Workflow: - -
    - - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_first.#` - - Workflow: - * `_resnet50,tf` - - Environment variables: - - *CM_QAIC_MODEL_TO_CONVERT*: `calibrate_resnet50_tf` - - Workflow: - -
    - - - * Group "**batch-size**" -
    - Click here to expand this section. - - * `_bs.#` - - Environment variables: - - *CM_QAIC_MODEL_BATCH_SIZE*: `#` - - *CM_CREATE_INPUT_BATCH*: `yes` - - Workflow: - * `_bs.1` - - Environment variables: - - *CM_QAIC_MODEL_BATCH_SIZE*: `1` - - *CM_CREATE_INPUT_BATCH*: `yes` - - Workflow: - -
    - - - * Group "**calib-dataset-filter-size**" -
    - Click here to expand this section. - - * `_filter-size.#` - - Workflow: - -
    - - - * Group "**calibration-option**" -
    - Click here to expand this section. - - * `_mlperf.option1` - - Workflow: - * `_mlperf.option2` - - Workflow: - -
    - - - * Group "**model**" -
    - Click here to expand this section. - - * `_bert-99` - - Environment variables: - - *CM_CALIBRATE_SQUAD*: `yes` - - *CM_QAIC_COMPILER_ARGS*: `` - - *CM_QAIC_COMPILER_PARAMS*: `-onnx-define-symbol=batch_size,1 -onnx-define-symbol=seg_length,<<>> -input-list-file=<<>> -num-histogram-bins=512 -profiling-threads=<<>>` - - *CM_QAIC_MODEL_TO_CONVERT*: `calibrate_bert_mlperf` - - Workflow: - * `_resnet50` - - Environment variables: - - *CM_QAIC_MODEL_NAME*: `resnet50` - - *CM_CALIBRATE_IMAGENET*: `yes` - - *CM_QAIC_COMPILER_ARGS*: `` - - *CM_QAIC_COMPILER_PARAMS*: `-output-node-name=ArgMax -profiling-threads=<<>>` - - *CM_QAIC_OUTPUT_NODE_NAME*: `-output-node-name=ArgMax` - - *CM_QAIC_MODEL_TO_CONVERT*: `calibrate_resnet50_tf` - - Workflow: - * `_retinanet` - - Environment variables: - - *CM_QAIC_MODEL_NAME*: `retinanet` - - *CM_CALIBRATE_OPENIMAGES*: `yes` - - *CM_QAIC_COMPILER_ARGS*: `` - - *CM_QAIC_COMPILER_PARAMS*: `-enable-channelwise -profiling-threads=<<>> -onnx-define-symbol=batch_size,<<>> -node-precision-info=<<>>` - - *CM_QAIC_MODEL_TO_CONVERT*: `calibrate_retinanet_no_nms_mlperf` - - Workflow: - -
    - - - * Group "**model-framework**" -
    - Click here to expand this section. - - * `_tf` - - Workflow: - -
    - - - * Group "**seq-length**" -
    - Click here to expand this section. - - * `_seq.#` - - Environment variables: - - *CM_DATASET_SQUAD_TOKENIZED_MAX_SEQ_LENGTH*: `#` - - Workflow: - * `_seq.384` - - Environment variables: - - *CM_DATASET_SQUAD_TOKENIZED_MAX_SEQ_LENGTH*: `#` - - Workflow: - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/calibrate-model-for.qaic/_cm.json)*** - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,qaic,apps,sdk - * CM names: `--adr.['qaic-apps-sdk']...` - - CM script: [get-qaic-apps-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-qaic-apps-sdk) - * get,preprocessed,dataset,_calibration,openimages,_for.retinanet.onnx,_NCHW,_fp32,_custom-annotations - * Enable this dependency only if all ENV vars are set:
    -`{'CM_CALIBRATE_OPENIMAGES': ['yes']}` - * CM names: `--adr.['openimages-cal', 'preprocessed-dataset']...` - - CM script: [get-preprocessed-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openimages) - * get,dataset,imagenet,preprocessed,_calibration,_for.resnet50,_float32,_rgb32 - * Enable this dependency only if all ENV vars are set:
    -`{'CM_CALIBRATE_IMAGENET': ['yes']}` - * CM names: `--adr.['imagenet-cal', 'preprocessed-calibration-dataset']...` - - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) - * get,dataset,preprocessed,_calib1,squad,_pickle,_seq-length.384,_packed - * Enable this dependency only if all ENV vars are set:
    -`{'CM_CALIBRATE_SQUAD': ['on']}` - * CM names: `--adr.['squad-cal', 'preprocessed-dataset']...` - - CM script: [get-preprocessed-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-squad) - * get,ml-model - * CM names: `--adr.['model-src']...` - - CM script: [get-ml-model-abtf-ssd-pytorch](https://github.com/mlcommons/cm4abtf/tree/master/script/get-ml-model-abtf-ssd-pytorch) - - CM script: [get-ml-model-3d-unet-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-3d-unet-kits19) - - CM script: [get-ml-model-bert-base-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-base-squad) - - CM script: [get-ml-model-bert-large-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-large-squad) - - CM script: [get-ml-model-dlrm-terabyte](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-dlrm-terabyte) - - CM script: [get-ml-model-efficientnet-lite](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-efficientnet-lite) - - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) - - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo) - - CM script: [get-ml-model-llama2](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-llama2) - - CM script: [get-ml-model-mobilenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-mobilenet) - - CM script: [get-ml-model-neuralmagic-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-neuralmagic-zoo) - - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) - - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) - - CM script: 
[get-ml-model-retinanet-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet-nvidia) - - CM script: [get-ml-model-rnnt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-rnnt) - - CM script: [get-ml-model-stable-diffusion](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-stable-diffusion) - - CM script: [get-ml-model-tiny-resnet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-tiny-resnet) - - CM script: [get-ml-model-using-imagenet-from-model-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-using-imagenet-from-model-zoo) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/calibrate-model-for.qaic/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/calibrate-model-for.qaic/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/calibrate-model-for.qaic/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/calibrate-model-for.qaic/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/calibrate-model-for.qaic/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/calibrate-model-for.qaic/_cm.json) - -___ -### Script output -`cmr "qaic calibrate profile qaic-profile qaic-calibrate [,variations]" -j` -#### New environment keys (filter) - -* `CM_QAIC_MODEL_PROFILE_*` -#### New environment keys auto-detected from customize - -* `CM_QAIC_MODEL_PROFILE_WITH_PATH` \ No newline at end of file diff --git a/docs/AI-ML-optimization/compile-model-for.qaic/index.md b/docs/AI-ML-optimization/compile-model-for.qaic/index.md deleted file mode 100644 index 686f6dec2..000000000 --- a/docs/AI-ML-optimization/compile-model-for.qaic/index.md +++ /dev/null @@ -1,438 +0,0 @@ -Automatically generated README for this automation recipe: **compile-model-for.qaic** - -Category: **AI/ML optimization** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=compile-model-for.qaic,3f0f43b5d0304d1c) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-model-for.qaic)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *qaic,compile,model,model-compile,qaic-compile* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "qaic compile model model-compile qaic-compile" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=qaic,compile,model,model-compile,qaic-compile` - -`cm run script --tags=qaic,compile,model,model-compile,qaic-compile[,variations] [--input_flags]` - -*or* - -`cmr "qaic compile model model-compile qaic-compile"` - -`cmr "qaic compile model model-compile qaic-compile [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'qaic,compile,model,model-compile,qaic-compile' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="qaic,compile,model,model-compile,qaic-compile"``` - -#### Run this script via Docker (beta) - -`cm docker script "qaic compile model model-compile qaic-compile[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_bert-99` - - Environment variables: - - *CM_COMPILE_BERT*: `on` - - *CM_QAIC_MODEL_TO_CONVERT*: `calibrate_bert_mlperf` - - *CM_QAIC_MODEL_COMPILER_PARAMS_BASE*: `-aic-hw -aic-hw-version=2.0 -execute-nodes-in-fp16=Add,Div,Erf,Softmax -quantization-schema=symmetric_with_uint8 -quantization-precision=Int8 -quantization-precision-bias=Int32 -vvv -compile-only -onnx-define-symbol=batch_size,1 -onnx-define-symbol=seg_length,384 -multicast-weights -combine-inputs=false -combine-outputs=false` - - *CM_QAIC_MODEL_COMPILER_ARGS*: `` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * calibrate,qaic,_bert-99 - * CM names: `--adr.['bert-profile', 'qaic-profile']...` - - CM script: [calibrate-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/calibrate-model-for.qaic) - * `_bert-99,offline` - - Environment variables: - - *CM_QAIC_MODEL_COMPILER_ARGS*: `-allocator-dealloc-delay=2 -size-split-granularity=1536 -vtcm-working-set-limit-ratio=1` - - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=1 -mos=1 -ols=2` - - Workflow: - * `_bert-99,offline,nsp.14` - - Environment variables: - - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=1 -mos=1 -ols=3` - - Workflow: - * `_bert-99,offline,nsp.16` - - Environment variables: - - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=1 -mos=1 -ols=2` - - Workflow: - * `_bert-99,server` - - Environment variables: - - *CM_QAIC_MODEL_COMPILER_ARGS*: `-allocator-dealloc-delay=2 -size-split-granularity=1536 -vtcm-working-set-limit-ratio=1` - - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=1 -mos=1 -ols=3` - - Workflow: - * `_bert-99,server,nsp.14` - - Environment variables: - - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=1 -mos=1 -ols=3` - - Workflow: - * `_bert-99,singlestream` - - Environment variables: - - *CM_QAIC_MODEL_COMPILER_ARGS*: `` - - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=8 -mos=8 -ols=1` - - Workflow: - * 
`_bert-99,singlestream,nsp.14` - - Environment variables: - - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=8 -mos=8 -ols=1` - - Workflow: - * `_bert-99.9` - - Environment variables: - - *CM_COMPILE_BERT*: `on` - - *CM_QAIC_MODEL_TO_CONVERT*: `bert_mlperf` - - *CM_QAIC_MODEL_COMPILER_PARAMS_BASE*: `-aic-hw -aic-hw-version=2.0 -convert-to-fp16 -vvv -compile-only -onnx-define-symbol=batch_size,1 -onnx-define-symbol=seg_length,384 -combine-inputs=false -combine-outputs=false` - - *CM_QAIC_MODEL_COMPILER_ARGS*: `` - - Workflow: - * `_bert-99.9,offline` - - Environment variables: - - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=2 -mos=1 -ols=2` - - Workflow: - * `_bert-99.9,offline,nsp.14` - - Environment variables: - - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=2 -mos=1 -ols=2` - - Workflow: - * `_bert-99.9,offline,nsp.16` - - Environment variables: - - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=2 -mos=1 -ols=2` - - Workflow: - * `_bert-99.9,server` - - Environment variables: - - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=2` - - Workflow: - * `_bert-99.9,server,nsp.14` - - Environment variables: - - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=2` - - Workflow: - * `_resnet50` - - Environment variables: - - *CM_COMPILE_RESNET*: `on` - - *CM_QAIC_MODEL_TO_CONVERT*: `compile_resnet50_tf` - - *CM_QAIC_MODEL_COMPILER_PARAMS_BASE*: `-aic-hw -aic-hw-version=2.0 -quantization-schema=symmetric_with_uint8 -quantization-precision=Int8 -output-node-name=ArgMax -vvv -compile-only -use-producer-dma=1` - - Workflow: - * `_resnet50,multistream` - - Environment variables: - - *CM_QAIC_MODEL_COMPILER_ARGS*: `` - - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=4 -mos=1 -ols=1` - - Workflow: - * `_resnet50,multistream,nsp.14` - - Environment variables: - - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=4` - - Workflow: - * `_resnet50,offline` - - Environment variables: - - *CM_QAIC_MODEL_COMPILER_ARGS*: `-sdp-cluster-sizes=2,2 
-multicast-weights` - - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=4 -mos=1,2 -ols=4` - - Workflow: - * `_resnet50,offline,nsp.14` - - Environment variables: - - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=4 -mos=1,2 -ols=4` - - Workflow: - * `_resnet50,server` - - Workflow: - * `_resnet50,server,nsp.14` - - Environment variables: - - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=4 -ols=4` - - *CM_QAIC_MODEL_COMPILER_ARGS*: `-sdp-cluster-sizes=2,2 -mos=1,2 -multicast-weights` - - Workflow: - * `_resnet50,server,nsp.16` - - Environment variables: - - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=4 -ols=4` - - *CM_QAIC_MODEL_COMPILER_ARGS*: `-sdp-cluster-sizes=4,4 -mos=1,4` - - Workflow: - * `_resnet50,singlestream` - - Environment variables: - - *CM_QAIC_MODEL_COMPILER_ARGS*: `-aic-num-of-instances=1` - - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=8 -mos=1 -ols=1` - - Workflow: - * `_resnet50,singlestream,nsp.14` - - Environment variables: - - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=8 -mos=1 -ols=1` - - Workflow: - * `_resnet50,tf` - - Environment variables: - - *CM_QAIC_MODEL_TO_CONVERT*: `calibrate_resnet50_tf` - - Workflow: - * `_retinanet` - - Environment variables: - - *CM_COMPILE_RETINANET*: `on` - - *CM_QAIC_MODEL_TO_CONVERT*: `calibrate_retinanet_no_nms_mlperf` - - *CM_QAIC_MODEL_COMPILER_ARGS*: `-aic-enable-depth-first` - - *CM_QAIC_MODEL_COMPILER_PARAMS_BASE*: `-aic-hw -aic-hw-version=2.0 -compile-only -enable-channelwise -onnx-define-symbol=batch_size,1 -node-precision-info=<<>> -quantization-schema-constants=symmetric_with_uint8 -quantization-schema-activations=asymmetric -quantization-calibration=None` - - Workflow: - * `_retinanet,multistream` - - Workflow: - * `_retinanet,nsp.14` - - Workflow: - * `_retinanet,offline` - - Environment variables: - - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=1 -mos=1 -ols=1` - - Workflow: - * `_retinanet,offline,nsp.14` - - Workflow: - * `_retinanet,server` - - 
Workflow: - * `_retinanet,server,nsp.14` - - Workflow: - * `_retinanet,singlestream` - - Environment variables: - - *CM_QAIC_MODEL_COMPILER_ARGS*: `` - - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=8 -mos=1 -ols=1` - - Workflow: - * `_retinanet,singlestream,nsp.14` - - Environment variables: - - *CM_QAIC_MODEL_COMPILER_ARGS_SUT*: `-aic-num-cores=8 -mos=1 -ols=1` - - Workflow: - -
    - - - * Group "**batch-size**" -
    - Click here to expand this section. - - * `_bs.#` - - Environment variables: - - *CM_QAIC_MODEL_BATCH_SIZE*: `#` - - Workflow: - * `_bs.1` - - Environment variables: - - *CM_QAIC_MODEL_BATCH_SIZE*: `1` - - Workflow: - -
    - - - * Group "**calib-dataset-filter-size**" -
    - Click here to expand this section. - - * `_filter-size.#` - - Workflow: - -
    - - - * Group "**mlperf-scenario**" -
    - Click here to expand this section. - - * `_multistream` - - Workflow: - * `_offline` - - Workflow: - * `_server` - - Workflow: - * **`_singlestream`** (default) - - Workflow: - -
    - - - * Group "**model-framework**" -
    - Click here to expand this section. - - * `_tf` - - Workflow: - -
    - - - * Group "**nsp**" -
    - Click here to expand this section. - - * `_nsp.14` - - Workflow: - * `_nsp.16` - - Workflow: - * `_nsp.8` - - Workflow: - * `_nsp.9` - - Workflow: - -
    - - - * Group "**percentile-calibration**" -
    - Click here to expand this section. - - * `_pc.#` - - Environment variables: - - *CM_QAIC_MODEL_COMPILER_PERCENTILE_CALIBRATION_VALUE*: `#` - - *CM_QAIC_MODEL_COMPILER_QUANTIZATION_PARAMS*: `-quantization-calibration=Percentile -percentile-calibration-value=<<>>` - - Workflow: - -
    - - - * Group "**quantization**" -
    - Click here to expand this section. - - * `_no-quantized` - - Environment variables: - - *CM_QAIC_MODEL_QUANTIZATION*: `no` - - Workflow: - * **`_quantized`** (default) - - Environment variables: - - *CM_QAIC_MODEL_QUANTIZATION*: `yes` - - Workflow: - -
    - - -#### Default variations - -`_quantized,_singlestream` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--register=value` → `CM_REGISTER_CACHE=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "register":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-model-for.qaic/_cm.json)*** - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,qaic,apps,sdk - * Skip this dependency only if all ENV vars are set:
    -`{'CM_REGISTER_CACHE': ['on']}` - * CM names: `--adr.['qaic-apps-sdk']...` - - CM script: [get-qaic-apps-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-qaic-apps-sdk) - * qaic,calibrate,_retinanet - * Enable this dependency only if all ENV vars are set:
    -`{'CM_COMPILE_RETINANET': ['yes']}` - * CM names: `--adr.['retinanet-profile', 'qaic-profile']...` - - CM script: [calibrate-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/calibrate-model-for.qaic) - * qaic,calibrate,_resnet50 - * Enable this dependency only if all ENV vars are set:
    -`{'CM_COMPILE_RESNET': ['on']}` - * Skip this dependency only if all ENV vars are set:
    -`{'CM_REGISTER_CACHE': ['on']}` - * CM names: `--adr.['resnet-profile', 'qaic-profile']...` - - CM script: [calibrate-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/calibrate-model-for.qaic) - * get,ml-model - * CM names: `--adr.['model-src']...` - - CM script: [get-ml-model-abtf-ssd-pytorch](https://github.com/mlcommons/cm4abtf/tree/master/script/get-ml-model-abtf-ssd-pytorch) - - CM script: [get-ml-model-3d-unet-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-3d-unet-kits19) - - CM script: [get-ml-model-bert-base-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-base-squad) - - CM script: [get-ml-model-bert-large-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-large-squad) - - CM script: [get-ml-model-dlrm-terabyte](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-dlrm-terabyte) - - CM script: [get-ml-model-efficientnet-lite](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-efficientnet-lite) - - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) - - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo) - - CM script: [get-ml-model-llama2](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-llama2) - - CM script: [get-ml-model-mobilenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-mobilenet) - - CM script: [get-ml-model-neuralmagic-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-neuralmagic-zoo) - - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) - - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) - - CM script: 
[get-ml-model-retinanet-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet-nvidia) - - CM script: [get-ml-model-rnnt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-rnnt) - - CM script: [get-ml-model-stable-diffusion](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-stable-diffusion) - - CM script: [get-ml-model-tiny-resnet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-tiny-resnet) - - CM script: [get-ml-model-using-imagenet-from-model-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-using-imagenet-from-model-zoo) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-model-for.qaic/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-model-for.qaic/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-model-for.qaic/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-model-for.qaic/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-model-for.qaic/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-model-for.qaic/_cm.json) - -___ -### Script output -`cmr "qaic compile model model-compile qaic-compile [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_ML_MODEL_FILE_WITH_PATH` -* `CM_QAIC_MODEL*` -#### New environment keys auto-detected from customize - -* `CM_ML_MODEL_FILE_WITH_PATH` -* `CM_QAIC_MODEL_COMPILED_BINARY_WITH_PATH` -* `CM_QAIC_MODEL_FINAL_COMPILATION_CMD` \ No newline at end of file diff --git a/docs/AI-ML-optimization/index.md b/docs/AI-ML-optimization/index.md new file mode 100644 index 000000000..0eed9529e --- /dev/null +++ b/docs/AI-ML-optimization/index.md @@ -0,0 +1,5 @@ +The AI/ML optimization category contains the following scripts: + +- [calibrate-model-for.qaic](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/calibrate-model-for.qaic/README.md) +- [compile-model-for.qaic](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/compile-model-for.qaic/README.md) +- [prune-bert-models](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/prune-bert-models/README.md) diff --git a/docs/AI-ML-optimization/prune-bert-models/index.md b/docs/AI-ML-optimization/prune-bert-models/index.md deleted file mode 100644 index b491bf9cf..000000000 --- a/docs/AI-ML-optimization/prune-bert-models/index.md +++ /dev/null @@ -1,185 +0,0 @@ -Automatically generated README for this automation recipe: **prune-bert-models** - -Category: **AI/ML optimization** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=prune-bert-models,76182d4896414216) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM 
GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-bert-models)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *prune,bert-models,bert-prune,prune-bert-models* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "prune bert-models bert-prune prune-bert-models" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=prune,bert-models,bert-prune,prune-bert-models` - -`cm run script --tags=prune,bert-models,bert-prune,prune-bert-models[,variations] [--input_flags]` - -*or* - -`cmr "prune bert-models bert-prune prune-bert-models"` - -`cmr "prune bert-models bert-prune prune-bert-models [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'prune,bert-models,bert-prune,prune-bert-models', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="prune,bert-models,bert-prune,prune-bert-models"``` - -#### Run this script via Docker (beta) - -`cm docker script "prune bert-models bert-prune prune-bert-models[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_model.#` - - Environment variables: - - *CM_BERT_PRUNE_MODEL_NAME*: `#` - - *CM_MODEL_ZOO_STUB*: `#` - - Workflow: - * `_path.#` - - Environment variables: - - *CM_BERT_PRUNE_CKPT_PATH*: `#` - - Workflow: - * `_task.#` - - Environment variables: - - *CM_BERT_PRUNE_TASK*: `#` - - Workflow: - -
    - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--constraint=value` → `CM_BERT_PRUNE_CONSTRAINT=value` -* `--output_dir=value` → `CM_BERT_PRUNE_OUTPUT_DIR=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "constraint":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_BERT_PRUNE_TASK: `squad` -* CM_BERT_PRUNE_MODEL_NAME: `bert-large-uncased` -* CM_MODEL_ZOO_STUB: `bert-large-uncased` -* CM_BERT_PRUNE_CONSTRAINT: `0.5` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-bert-models/_cm.json)*** - * get,python3 - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,generic-python-lib,_numpy - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_scipy - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_cupy - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_tqdm - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torch_cuda - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_datasets - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_transformers - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_scikit-learn - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,git,repo,_repo.https://github.com/cknowledge/retraining-free-pruning - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - * get,ml-model,model,zoo,model-zoo,huggingface,_prune - * CM names: `--adr.['get-model']...` - - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo) - 1. 
***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-bert-models/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-bert-models/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-bert-models/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-bert-models/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-bert-models/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-bert-models/_cm.json) - -___ -### Script output -`cmr "prune bert-models bert-prune prune-bert-models [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/CM-Interface/index.md b/docs/CM-Interface/index.md new file mode 100644 index 000000000..72b9ec559 --- /dev/null +++ b/docs/CM-Interface/index.md @@ -0,0 +1,3 @@ +The CM Interface category contains the following scripts: + +- [get-cache-dir](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-cache-dir/README.md) diff --git a/docs/CM-automation/index.md b/docs/CM-automation/index.md new file mode 100644 index 000000000..fb837e817 --- /dev/null +++ b/docs/CM-automation/index.md @@ -0,0 +1,3 @@ +The CM automation category contains the following scripts: + +- [create-custom-cache-entry](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/create-custom-cache-entry/README.md) diff --git a/docs/CM-interface-prototyping/index.md b/docs/CM-interface-prototyping/index.md new file mode 100644 index 000000000..e727d9f1d --- /dev/null +++ b/docs/CM-interface-prototyping/index.md @@ -0,0 +1,4 @@ +The CM 
interface prototyping category contains the following scripts: + +- [test-debug](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/test-debug/README.md) +- [test-mlperf-inference-retinanet](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/test-mlperf-inference-retinanet/README.md) diff --git a/docs/CM-interface-prototyping/test-mlperf-inference-retinanet/index.md b/docs/CM-interface-prototyping/test-mlperf-inference-retinanet/index.md deleted file mode 100644 index 76c94e570..000000000 --- a/docs/CM-interface-prototyping/test-mlperf-inference-retinanet/index.md +++ /dev/null @@ -1,135 +0,0 @@ -Automatically generated README for this automation recipe: **test-mlperf-inference-retinanet** - -Category: **CM interface prototyping** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=test-mlperf-inference-retinanet,1cedbc3b642a403a) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *test,mlperf-inference-win,retinanet,windows* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "test mlperf-inference-win retinanet windows" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=test,mlperf-inference-win,retinanet,windows` - -`cm run script --tags=test,mlperf-inference-win,retinanet,windows ` - -*or* - -`cmr "test mlperf-inference-win retinanet windows"` - -`cmr "test mlperf-inference-win retinanet windows " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'test,mlperf-inference-win,retinanet,windows' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="test,mlperf-inference-win,retinanet,windows"``` - -#### Run this script via Docker (beta) - -`cm docker script "test mlperf-inference-win retinanet windows" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet/_cm.json)*** - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,generic-python-lib,_requests - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,loadgen - * CM names: `--adr.['loadgen', 'mlperf-inference-loadgen']...` - - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) - * mlperf,inference,source - * CM names: `--adr.['inference-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * get,dataset,open-images,original - - CM script: [get-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages) - * get,raw,ml-model,retinanet - - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet/run.sh) - 1. 
Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-mlperf-inference-retinanet/_cm.json) - -___ -### Script output -`cmr "test mlperf-inference-win retinanet windows " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/CUDA-automation/get-cuda-devices/index.md b/docs/CUDA-automation/get-cuda-devices/index.md deleted file mode 100644 index 931e10be8..000000000 --- a/docs/CUDA-automation/get-cuda-devices/index.md +++ /dev/null @@ -1,122 +0,0 @@ -Automatically generated README for this automation recipe: **get-cuda-devices** - -Category: **CUDA automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-cuda-devices,7a3ede4d3558427a) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda-devices)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,cuda-devices* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get cuda-devices" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,cuda-devices` - -`cm run script --tags=get,cuda-devices ` - -*or* - -`cmr "get cuda-devices"` - -`cmr "get cuda-devices " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'get,cuda-devices', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,cuda-devices"``` - -#### Run this script via Docker (beta) - -`cm docker script "get cuda-devices" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda-devices/_cm.json)*** - * get,cuda,_toolkit - * CM names: `--adr.['cuda']...` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda-devices/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda-devices/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda-devices/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda-devices/_cm.json) - 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda-devices/customize.py)*** - 1.
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda-devices/_cm.json) - -___ -### Script output -`cmr "get cuda-devices " -j` -#### New environment keys (filter) - -* `CM_CUDA_DEVICE_*` -#### New environment keys auto-detected from customize diff --git a/docs/CUDA-automation/get-cuda/index.md b/docs/CUDA-automation/get-cuda/index.md deleted file mode 100644 index d0b49f561..000000000 --- a/docs/CUDA-automation/get-cuda/index.md +++ /dev/null @@ -1,230 +0,0 @@ -Automatically generated README for this automation recipe: **get-cuda** - -Category: **CUDA automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-cuda,46d133d9ef92422d) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- - -# System dependencies - -* Download [CUDA toolkit](https://developer.nvidia.com/cuda-toolkit). -* Download [cuDNN](https://developer.nvidia.com/rdp/cudnn-download). -* Download [TensorRT](https://developer.nvidia.com/nvidia-tensorrt-8x-download). - - - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,cuda,cuda-compiler,cuda-lib,toolkit,lib,nvcc,get-nvcc,get-cuda* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get cuda cuda-compiler cuda-lib toolkit lib nvcc get-nvcc get-cuda" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,cuda,cuda-compiler,cuda-lib,toolkit,lib,nvcc,get-nvcc,get-cuda` - -`cm run script --tags=get,cuda,cuda-compiler,cuda-lib,toolkit,lib,nvcc,get-nvcc,get-cuda[,variations] [--input_flags]` - -*or* - -`cmr "get cuda cuda-compiler cuda-lib toolkit lib nvcc get-nvcc get-cuda"` - -`cmr "get cuda cuda-compiler cuda-lib toolkit lib nvcc get-nvcc get-cuda [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,cuda,cuda-compiler,cuda-lib,toolkit,lib,nvcc,get-nvcc,get-cuda' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,cuda,cuda-compiler,cuda-lib,toolkit,lib,nvcc,get-nvcc,get-cuda"``` - -#### Run this script via Docker (beta) - -`cm docker script "get cuda cuda-compiler cuda-lib toolkit lib nvcc get-nvcc get-cuda[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_cudnn` - - Environment variables: - - *CM_CUDA_NEEDS_CUDNN*: `yes` - - Workflow: - 1. ***Read "post_deps" on other CM scripts*** - * get,nvidia,cudnn - * CM names: `--adr.['cudnn']...` - - CM script: [get-cudnn](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cudnn) - * `_package-manager` - - Environment variables: - - *CM_CUDA_PACKAGE_MANAGER_INSTALL*: `yes` - - Workflow: - -
    - - - * Group "**installation-mode**" -
    - Click here to expand this section. - - * `_lib-only` - - Environment variables: - - *CM_CUDA_FULL_TOOLKIT_INSTALL*: `no` - - *CM_TMP_FILE_TO_CHECK_UNIX*: `libcudart.so` - - *CM_TMP_FILE_TO_CHECK_WINDOWS*: `libcudart.dll` - - Workflow: - * **`_toolkit`** (default) - - Environment variables: - - *CM_CUDA_FULL_TOOLKIT_INSTALL*: `yes` - - *CM_TMP_FILE_TO_CHECK_UNIX*: `nvcc` - - *CM_TMP_FILE_TO_CHECK_WINDOWS*: `nvcc.exe` - - Workflow: - -
    - - -#### Default variations - -`_toolkit` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--cudnn_tar_file=value` → `CM_CUDNN_TAR_FILE_PATH=value` -* `--cudnn_tar_path=value` → `CM_CUDNN_TAR_FILE_PATH=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "cudnn_tar_file":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_CUDA_PATH_LIB_CUDNN_EXISTS: `no` -* CM_REQUIRE_INSTALL: `no` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,cl - * Enable this dependency only if all ENV vars are set:
    -`{'CM_CUDA_FULL_TOOLKIT_INSTALL': ['yes'], 'CM_HOST_OS_TYPE': ['windows']}` - * CM names: `--adr.['compiler']...` - - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda/_cm.json)*** - * install,cuda,prebuilt - * Enable this dependency only if all ENV vars are set:
    -`{'CM_REQUIRE_INSTALL': ['yes']}` - * CM names: `--adr.['install-cuda-prebuilt']...` - - CM script: [install-cuda-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/install-cuda-prebuilt) - * get,generic-sys-util,_nvidia-cuda-toolkit - * Enable this dependency only if all ENV vars are set:
    -`{'CM_CUDA_PACKAGE_MANAGER_INSTALL': ['yes']}` - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cuda/_cm.json) - -___ -### Script output -`cmr "get cuda cuda-compiler cuda-lib toolkit lib nvcc get-nvcc get-cuda [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `+ LDFLAGS` -* `+CPLUS_INCLUDE_PATH` -* `+C_INCLUDE_PATH` -* `+DYLD_FALLBACK_LIBRARY_PATH` -* `+LD_LIBRARY_PATH` -* `+PATH` -* `CM_CUDA_*` -* `CM_NVCC_*` -* `CUDA_HOME` -* `CUDA_PATH` -#### New environment keys auto-detected from customize - -* `CM_CUDA_CACHE_TAGS` -* `CM_CUDA_FULL_TOOLKIT_INSTALL` -* `CM_CUDA_INSTALLED_PATH` -* `CM_CUDA_PATH_BIN` -* `CM_CUDA_PATH_INCLUDE` -* `CM_CUDA_PATH_LIB` -* `CM_CUDA_VERSION` -* `CM_CUDA_VERSION_STRING` -* `CM_NVCC_BIN` \ No newline at end of file diff --git a/docs/CUDA-automation/get-cudnn/index.md b/docs/CUDA-automation/get-cudnn/index.md deleted file mode 100644 index 224fb2641..000000000 --- a/docs/CUDA-automation/get-cudnn/index.md +++ /dev/null @@ -1,167 +0,0 @@ -Automatically generated README for this automation recipe: **get-cudnn** - -Category: **CUDA automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-cudnn,d73ee19baee14df8) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cudnn)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,cudnn,nvidia* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get cudnn nvidia" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,cudnn,nvidia` - -`cm run script --tags=get,cudnn,nvidia [--input_flags]` - -*or* - -`cmr "get cudnn nvidia"` - -`cmr "get cudnn nvidia " [--input_flags]` - - - -#### Input Flags - -* --**input**=Full path to the installed cuDNN library -* --**tar_file**=Full path to the cuDNN Tar file downloaded from Nvidia website (https://developer.nvidia.com/cudnn) - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "input":...} -``` -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,cudnn,nvidia' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,cudnn,nvidia"``` - -#### Run this script via Docker (beta) - -`cm docker script "get cudnn nvidia" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--input=value` → `CM_INPUT=value` -* `--tar_file=value` → `CM_CUDNN_TAR_FILE_PATH=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "input":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_SUDO: `sudo` -* CM_INPUT: `` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cudnn/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,cuda - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_CUDA_PATH_LIB': ['on'], 'CM_CUDA_PATH_INCLUDE': ['on']}` - * CM names: `--adr.['cuda']...` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cudnn/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cudnn/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cudnn/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cudnn/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cudnn/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cudnn/_cm.json) - -___ -### Script output -`cmr "get cudnn nvidia " [--input_flags] -j` -#### New environment keys (filter) - -* `+CPLUS_INCLUDE_PATH` -* `+C_INCLUDE_PATH` -* `+DYLD_FALLBACK_LIBRARY_PATH` -* `+LD_LIBRARY_PATH` -* `+PATH` -* `CM_CUDA_PATH_INCLUDE_CUDNN` -* `CM_CUDA_PATH_LIB_CUDNN` -* `CM_CUDA_PATH_LIB_CUDNN_EXISTS` -* `CM_CUDNN_*` -#### New environment keys auto-detected from customize - -* `CM_CUDA_PATH_INCLUDE_CUDNN` -* `CM_CUDA_PATH_LIB_CUDNN` -* `CM_CUDA_PATH_LIB_CUDNN_EXISTS` -* `CM_CUDNN_VERSION` \ No newline at end of file diff --git a/docs/CUDA-automation/get-tensorrt/index.md b/docs/CUDA-automation/get-tensorrt/index.md deleted file mode 100644 index 07153e153..000000000 --- a/docs/CUDA-automation/get-tensorrt/index.md +++ /dev/null @@ -1,176 +0,0 @@ -Automatically generated README for this automation recipe: **get-tensorrt** - -Category: **CUDA automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and 
Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-tensorrt,2a84ca505e4c408d) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tensorrt)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,tensorrt,nvidia* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get tensorrt nvidia" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,tensorrt,nvidia` - -`cm run script --tags=get,tensorrt,nvidia[,variations] [--input_flags]` - -*or* - -`cmr "get tensorrt nvidia"` - -`cmr "get tensorrt nvidia [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - - -#### Input Flags - -* --**input**=Full path to the installed TensorRT library (nvinfer) -* --**tar_file**=Full path to the TensorRT Tar file downloaded from the Nvidia website 
(https://developer.nvidia.com/tensorrt) - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "input":...} -``` -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,tensorrt,nvidia' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,tensorrt,nvidia"``` - -#### Run this script via Docker (beta) - -`cm docker script "get tensorrt nvidia[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_dev` - - Environment variables: - - *CM_TENSORRT_REQUIRE_DEV*: `yes` - - Workflow: - -
    - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--input=value` → `CM_INPUT=value` -* `--tar_file=value` → `CM_TENSORRT_TAR_FILE_PATH=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "input":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tensorrt/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tensorrt/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tensorrt/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tensorrt/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tensorrt/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tensorrt/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-tensorrt/_cm.json) - -___ -### Script output -`cmr "get tensorrt nvidia [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `+ LDFLAGS` -* `+CPLUS_INCLUDE_PATH` -* `+C_INCLUDE_PATH` -* `+DYLD_FALLBACK_LIBRARY_PATH` -* `+LD_LIBRARY_PATH` -* `+PATH` -* `CM_TENSORRT_*` -#### New environment keys auto-detected from customize - -* `CM_TENSORRT_INSTALL_PATH` -* `CM_TENSORRT_LIB_PATH` -* `CM_TENSORRT_VERSION` \ No newline at end of file diff --git a/docs/CUDA-automation/index.md b/docs/CUDA-automation/index.md new file mode 100644 index 000000000..0e6e14fc9 --- /dev/null +++ b/docs/CUDA-automation/index.md @@ -0,0 +1,8 @@ +The CUDA automation category contains the following scripts: + +- [get-cuda](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-cuda/README.md) +- [get-cuda-devices](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-cuda-devices/README.md) +- [get-cudnn](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-cudnn/README.md) +- [get-tensorrt](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-tensorrt/README.md) +- [install-cuda-package-manager](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-cuda-package-manager/README.md) +- [install-cuda-prebuilt](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-cuda-prebuilt/README.md) diff --git a/docs/CUDA-automation/install-cuda-package-manager/index.md b/docs/CUDA-automation/install-cuda-package-manager/index.md deleted file mode 100644 index e08286c94..000000000 --- a/docs/CUDA-automation/install-cuda-package-manager/index.md +++ /dev/null @@ -1,124 +0,0 @@ -Automatically generated README for this automation recipe: **install-cuda-package-manager** - -Category: **CUDA automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons 
Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-cuda-package-manager,c1afdff8542f45be) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-package-manager)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *install,package-manager,cuda,package-manager-cuda,install-pm-cuda* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install package-manager cuda package-manager-cuda install-pm-cuda" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,package-manager,cuda,package-manager-cuda,install-pm-cuda` - -`cm run script --tags=install,package-manager,cuda,package-manager-cuda,install-pm-cuda ` - -*or* - -`cmr "install package-manager cuda package-manager-cuda install-pm-cuda"` - -`cmr "install package-manager cuda package-manager-cuda install-pm-cuda " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'install,package-manager,cuda,package-manager-cuda,install-pm-cuda' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,package-manager,cuda,package-manager-cuda,install-pm-cuda"``` - -#### Run this script via Docker (beta) - -`cm docker script "install package-manager cuda package-manager-cuda install-pm-cuda" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-package-manager/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-package-manager/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-package-manager/_cm.json) - 1. ***Run native script if exists*** - * [run-ubuntu.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-package-manager/run-ubuntu.sh) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-package-manager/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-package-manager/_cm.json) - 1. Run "postrocess" function from customize.py - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-package-manager/_cm.json)*** - * get,cuda - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_REQUIRE_INSTALL': ['yes']}` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - -___ -### Script output -`cmr "install package-manager cuda package-manager-cuda install-pm-cuda " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/CUDA-automation/install-cuda-prebuilt/index.md b/docs/CUDA-automation/install-cuda-prebuilt/index.md deleted file mode 100644 index 16a3fe50b..000000000 --- a/docs/CUDA-automation/install-cuda-prebuilt/index.md +++ /dev/null @@ -1,180 +0,0 @@ -Automatically generated README for this automation recipe: **install-cuda-prebuilt** - -Category: **CUDA automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-cuda-prebuilt,14eadcd42ba340c3) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-prebuilt)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *install,prebuilt,cuda,prebuilt-cuda,install-prebuilt-cuda* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install prebuilt cuda prebuilt-cuda install-prebuilt-cuda" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,prebuilt,cuda,prebuilt-cuda,install-prebuilt-cuda` - -`cm run script --tags=install,prebuilt,cuda,prebuilt-cuda,install-prebuilt-cuda[,variations] [--input_flags]` - -*or* - -`cmr "install prebuilt cuda prebuilt-cuda install-prebuilt-cuda"` - -`cmr "install prebuilt cuda prebuilt-cuda install-prebuilt-cuda [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'install,prebuilt,cuda,prebuilt-cuda,install-prebuilt-cuda' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,prebuilt,cuda,prebuilt-cuda,install-prebuilt-cuda"``` - -#### Run this script via Docker (beta) - -`cm docker script "install prebuilt cuda prebuilt-cuda install-prebuilt-cuda[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * Group "**install-driver**" -
    - Click here to expand this section. - - * `_driver` - - Environment variables: - - *CM_CUDA_INSTALL_DRIVER*: `yes` - - Workflow: - * **`_no-driver`** (default) - - Environment variables: - - *CM_CUDA_INSTALL_DRIVER*: `no` - - Workflow: - -
    - - -#### Default variations - -`_no-driver` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--local_run_file_path=value` → `CUDA_RUN_FILE_LOCAL_PATH=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "local_run_file_path":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_SUDO: `sudo` - -
    - -#### Versions -Default version: `11.8.0` - -* `11.7.0` -* `11.8.0` -* `12.0.0` -* `12.1.1` -* `12.2.0` -* `12.3.2` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-prebuilt/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-prebuilt/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-prebuilt/_cm.json)*** - * download,file - * CM names: `--adr.['download-script']...` - - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-prebuilt/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-prebuilt/_cm.json) - 1. Run "postrocess" function from customize.py - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cuda-prebuilt/_cm.json)*** - * get,cuda - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_REQUIRE_INSTALL': ['yes']}` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - -___ -### Script output -`cmr "install prebuilt cuda prebuilt-cuda install-prebuilt-cuda [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_CUDA_*` -* `CM_NVCC_*` -#### New environment keys auto-detected from customize - -* `CM_CUDA_INSTALLED_PATH` -* `CM_NVCC_BIN_WITH_PATH` \ No newline at end of file diff --git a/docs/Cloud-automation/destroy-terraform/index.md b/docs/Cloud-automation/destroy-terraform/index.md deleted file mode 100644 index 514106a78..000000000 --- a/docs/Cloud-automation/destroy-terraform/index.md +++ /dev/null @@ -1,121 +0,0 @@ -Automatically generated README for this automation recipe: **destroy-terraform** - -Category: **Cloud automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=destroy-terraform,3463458d03054856) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *destroy,terraform,cmd* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "destroy terraform cmd" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=destroy,terraform,cmd` - -`cm run script --tags=destroy,terraform,cmd ` - -*or* - -`cmr "destroy terraform cmd"` - -`cmr "destroy terraform cmd " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'destroy,terraform,cmd' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="destroy,terraform,cmd"``` - -#### Run this script via Docker (beta) - -`cm docker script "destroy terraform cmd" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform/_cm.json)*** - * get,terraform - * CM names: `--adr.['terraform']...` - - CM script: [get-terraform](https://github.com/mlcommons/cm4mlops/tree/master/script/get-terraform) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/destroy-terraform/_cm.json) - -___ -### Script output -`cmr "destroy terraform cmd " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Cloud-automation/get-aws-cli/index.md b/docs/Cloud-automation/get-aws-cli/index.md deleted file mode 100644 index 52bc80bfa..000000000 --- a/docs/Cloud-automation/get-aws-cli/index.md +++ /dev/null @@ -1,125 +0,0 @@ -Automatically generated README for this automation recipe: **get-aws-cli** - -Category: **Cloud automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-aws-cli,dad67944229942a3) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aws-cli)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,aws-cli,aws,cli* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get aws-cli aws cli" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,aws-cli,aws,cli` - -`cm run script --tags=get,aws-cli,aws,cli ` - -*or* - -`cmr "get aws-cli aws cli"` - -`cmr "get aws-cli aws cli " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,aws-cli,aws,cli' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,aws-cli,aws,cli"``` - -#### Run this script via Docker (beta) - -`cm docker script "get aws-cli aws cli" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aws-cli/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aws-cli/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aws-cli/_cm.json)*** - * install,aws-cli - * Enable this dependency only if all ENV vars are set:
    -`{'CM_REQUIRE_INSTALL': ['yes']}` - - CM script: [install-aws-cli](https://github.com/mlcommons/cm4mlops/tree/master/script/install-aws-cli) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aws-cli/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aws-cli/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aws-cli/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aws-cli/_cm.json) - -___ -### Script output -`cmr "get aws-cli aws cli " -j` -#### New environment keys (filter) - -* `CM_AWS_*` -#### New environment keys auto-detected from customize - -* `CM_AWS_CACHE_TAGS` -* `CM_AWS_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/Cloud-automation/get-terraform/index.md b/docs/Cloud-automation/get-terraform/index.md deleted file mode 100644 index 22b001ca8..000000000 --- a/docs/Cloud-automation/get-terraform/index.md +++ /dev/null @@ -1,126 +0,0 @@ -Automatically generated README for this automation recipe: **get-terraform** - -Category: **Cloud automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-terraform,66b33c38a4d7461e) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-terraform)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse 
this script (see in above meta description): *get,terraform,get-terraform* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get terraform get-terraform" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,terraform,get-terraform` - -`cm run script --tags=get,terraform,get-terraform ` - -*or* - -`cmr "get terraform get-terraform"` - -`cmr "get terraform get-terraform " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,terraform,get-terraform' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,terraform,get-terraform"``` - -#### Run this script via Docker (beta) - -`cm docker script "get terraform get-terraform" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-terraform/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-terraform/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-terraform/_cm.json)*** - * install,terraform - * Enable this dependency only if all ENV vars are set:
    -`{'CM_REQUIRE_INSTALL': ['yes']}` - - CM script: [install-terraform-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-terraform-from-src) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-terraform/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-terraform/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-terraform/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-terraform/_cm.json) - -___ -### Script output -`cmr "get terraform get-terraform " -j` -#### New environment keys (filter) - -* `+PATH` -* `CM_TERRAFORM_*` -#### New environment keys auto-detected from customize - -* `CM_TERRAFORM_CACHE_TAGS` -* `CM_TERRAFORM_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/Cloud-automation/index.md b/docs/Cloud-automation/index.md new file mode 100644 index 000000000..1fc95d9c7 --- /dev/null +++ b/docs/Cloud-automation/index.md @@ -0,0 +1,8 @@ +The Cloud automation category contains the following scripts: + +- [destroy-terraform](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/destroy-terraform/README.md) +- [get-aws-cli](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-aws-cli/README.md) +- [get-terraform](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-terraform/README.md) +- [install-aws-cli](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-aws-cli/README.md) +- [install-terraform-from-src](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-terraform-from-src/README.md) +- [run-terraform](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/run-terraform/README.md) diff --git 
a/docs/Cloud-automation/install-aws-cli/index.md b/docs/Cloud-automation/install-aws-cli/index.md deleted file mode 100644 index d142c7c07..000000000 --- a/docs/Cloud-automation/install-aws-cli/index.md +++ /dev/null @@ -1,123 +0,0 @@ -Automatically generated README for this automation recipe: **install-aws-cli** - -Category: **Cloud automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-aws-cli,4d3efd333c3f4d36) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-aws-cli)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *install,script,aws-cli,aws,cli* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install script aws-cli aws cli" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,script,aws-cli,aws,cli` - -`cm run script --tags=install,script,aws-cli,aws,cli ` - -*or* - -`cmr "install script aws-cli aws cli"` - -`cmr "install script aws-cli aws cli " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'install,script,aws-cli,aws,cli' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,script,aws-cli,aws,cli"``` - -#### Run this script via Docker (beta) - -`cm docker script "install script aws-cli aws cli" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-aws-cli/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-aws-cli/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-aws-cli/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-aws-cli/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-aws-cli/_cm.json) - 1. Run "postrocess" function from customize.py - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-aws-cli/_cm.json)*** - * get,aws-cli - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_REQUIRE_INSTALL': ['yes']}` - - CM script: [get-aws-cli](https://github.com/mlcommons/cm4mlops/tree/master/script/get-aws-cli) - -___ -### Script output -`cmr "install script aws-cli aws cli " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Cloud-automation/install-terraform-from-src/index.md b/docs/Cloud-automation/install-terraform-from-src/index.md deleted file mode 100644 index 0d7c83531..000000000 --- a/docs/Cloud-automation/install-terraform-from-src/index.md +++ /dev/null @@ -1,130 +0,0 @@ -Automatically generated README for this automation recipe: **install-terraform-from-src** - -Category: **Cloud automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-terraform-from-src,d79d47a074f34428) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-terraform-from-src)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *install,terraform,from-src* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install terraform from-src" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,terraform,from-src` - -`cm run script --tags=install,terraform,from-src ` - -*or* - -`cmr "install terraform from-src"` - -`cmr "install terraform from-src " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'install,terraform,from-src' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,terraform,from-src"``` - -#### Run this script via Docker (beta) - -`cm docker script "install terraform from-src" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `main` - -* `main` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-terraform-from-src/_cm.json)*** - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,tool,go - - CM script: [get-go](https://github.com/mlcommons/cm4mlops/tree/master/script/get-go) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-terraform-from-src/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-terraform-from-src/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-terraform-from-src/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-terraform-from-src/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-terraform-from-src/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-terraform-from-src/_cm.json) - -___ -### Script output -`cmr "install terraform from-src " -j` -#### New environment keys (filter) - -* `+PATH` -* `CM_TERRAFORM_*` -#### New environment keys auto-detected from customize - -* `CM_TERRAFORM_BIN_WITH_PATH` -* `CM_TERRAFORM_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/Cloud-automation/run-terraform/index.md b/docs/Cloud-automation/run-terraform/index.md deleted file mode 100644 index 7e5699478..000000000 --- a/docs/Cloud-automation/run-terraform/index.md +++ /dev/null @@ -1,481 +0,0 @@ -Automatically generated README for this automation recipe: **run-terraform** - -Category: **Cloud automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-terraform,ec344bd44af144d7) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- - -## Setup for Google Cloud Instances -``` -sudo snap install google-cloud-cli --classic -gcloud auth application-default login -``` - -The above two commands will install google-cloud-cli and authorizes the user to access it. Once done, you can start creating gcp instance using CM commands like below. To destroy an instance just repeat the same command with `--destroy` option. 
- -``` -cm run script --tags=run,terraform,_gcp,_gcp_project.mlperf-inference-tests --cminit -``` -Here, `mlperf-inference-tests` is the name of the google project as created in [Google cloud console](https://console.cloud.google.com/apis/dashboard) - - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-terraform)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *run,terraform* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "run terraform" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=run,terraform` - -`cm run script --tags=run,terraform[,variations] [--input_flags]` - -*or* - -`cmr "run terraform"` - -`cmr "run terraform [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'run,terraform' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="run,terraform"``` - -#### Run this script via Docker (beta) - -`cm docker script "run terraform[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_amazon-linux-2-kernel.#` - - Environment variables: - - *TF_VAR_INSTANCE_IMAGE_OS*: `amazon-linux-2-kernel.#` - - Workflow: - * `_graviton` - - Environment variables: - - *CM_TERRAFORM_AWS_GRAVITON_INSTANCE*: `yes` - - Workflow: - * `_inferentia` - - Environment variables: - - *CM_TERRAFORM_AWS_INFERENTIA_INSTANCE*: `yes` - - Workflow: - * `_inferentia,amazon-linux-2-kernel.510` - - Workflow: - * `_rhel.#` - - Environment variables: - - *TF_VAR_INSTANCE_IMAGE_OS*: `rhel.#` - - Workflow: - * `_ubuntu.#` - - Environment variables: - - *TF_VAR_INSTANCE_IMAGE_OS*: `ubuntu.#` - - Workflow: - -
    - - - * Group "**aws-instance-image**" -
    - Click here to expand this section. - - * `_amazon-linux-2-kernel.510,arm64,us-west-2` - - Environment variables: - - *TF_VAR_INSTANCE_IMAGE*: `ami-0f1a5f5ada0e7da53` - - Workflow: - * `_aws_instance_image.#` - - Environment variables: - - *TF_VAR_INSTANCE_IMAGE*: `#` - - Workflow: - * `_aws_instance_image.ami-0735c191cf914754d` - - Environment variables: - - *TF_VAR_INSTANCE_IMAGE*: `ami-0735c191cf914754d` - - Workflow: - * `_aws_instance_image.ami-0a0d8589b597d65b3` - - Environment variables: - - *TF_VAR_INSTANCE_IMAGE*: `ami-0a0d8589b597d65b3` - - Workflow: - * `_rhel.9,x86,us-west-2` - - Environment variables: - - *TF_VAR_INSTANCE_IMAGE*: `ami-0dda7e535b65b6469` - - Workflow: - * `_ubuntu.2204,arm64,us-west-2` - - Environment variables: - - *TF_VAR_INSTANCE_IMAGE*: `ami-079f51a7bcca65b92` - - Workflow: - * `_ubuntu.2204,x86,us-west-2` - - Environment variables: - - *TF_VAR_INSTANCE_IMAGE*: `ami-0735c191cf914754d` - - Workflow: - -
    - - - * Group "**aws-instance-type**" -
    - Click here to expand this section. - - * `_a1.2xlarge` - - Environment variables: - - *TF_VAR_INSTANCE_TYPE*: `a1.2xlarge` - - Workflow: - * `_a1.metal` - - Environment variables: - - *TF_VAR_INSTANCE_TYPE*: `a1.metal` - - Workflow: - * `_a1.xlarge` - - Environment variables: - - *TF_VAR_INSTANCE_TYPE*: `a1.xlarge` - - Workflow: - * `_aws_instance_type.#` - - Environment variables: - - *TF_VAR_INSTANCE_TYPE*: `#` - - Workflow: - * `_c5.12xlarge` - - Environment variables: - - *TF_VAR_INSTANCE_TYPE*: `c5.12xlarge` - - Workflow: - * `_c5.4xlarge` - - Environment variables: - - *TF_VAR_INSTANCE_TYPE*: `c5.4xlarge` - - Workflow: - * `_c5d.9xlarge` - - Environment variables: - - *TF_VAR_INSTANCE_TYPE*: `c5d.9xlarge` - - Workflow: - * `_g4dn.xlarge` - - Environment variables: - - *TF_VAR_INSTANCE_TYPE*: `g4dn.xlarge` - - Workflow: - * `_inf1.2xlarge` - - Environment variables: - - *TF_VAR_INSTANCE_TYPE*: `inf1.2xlarge` - - Workflow: - * `_inf1.xlarge` - - Environment variables: - - *TF_VAR_INSTANCE_TYPE*: `inf1.xlarge` - - Workflow: - * `_inf2.8xlarge` - - Environment variables: - - *TF_VAR_INSTANCE_TYPE*: `inf2.8xlarge` - - Workflow: - * `_inf2.xlarge` - - Environment variables: - - *TF_VAR_INSTANCE_TYPE*: `inf2.xlarge` - - Workflow: - * `_m7g.2xlarge` - - Environment variables: - - *TF_VAR_INSTANCE_TYPE*: `m7g.2xlarge` - - Workflow: - * `_m7g.xlarge` - - Environment variables: - - *TF_VAR_INSTANCE_TYPE*: `m7g.xlarge` - - Workflow: - * `_t2.#` - - Environment variables: - - *TF_VAR_INSTANCE_TYPE*: `t2.#` - - Workflow: - * `_t2.2xlarge` - - Environment variables: - - *TF_VAR_INSTANCE_TYPE*: `t2.2xlarge` - - Workflow: - * `_t2.large` - - Environment variables: - - *TF_VAR_INSTANCE_TYPE*: `t2.large` - - Workflow: - * `_t2.medium` - - Environment variables: - - *TF_VAR_INSTANCE_TYPE*: `t2.medium` - - Workflow: - * `_t2.micro` - - Environment variables: - - *TF_VAR_INSTANCE_TYPE*: `t2.micro` - - Workflow: - * `_t2.nano` - - Environment variables: - - 
*TF_VAR_INSTANCE_TYPE*: `t2.nano` - - Workflow: - * `_t2.small` - - Environment variables: - - *TF_VAR_INSTANCE_TYPE*: `t2.small` - - Workflow: - * `_t2.xlarge` - - Environment variables: - - *TF_VAR_INSTANCE_TYPE*: `t2.xlarge` - - Workflow: - -
    - - - * Group "**cloud-provider**" -
    - Click here to expand this section. - - * **`_aws`** (default) - - Environment variables: - - *CM_TERRAFORM_CONFIG_DIR_NAME*: `aws` - - Workflow: - * `_gcp` - - Environment variables: - - *CM_TERRAFORM_CONFIG_DIR_NAME*: `gcp` - - Workflow: - -
    - - - * Group "**gcp-instance-image**" -
    - Click here to expand this section. - - * `_debian-cloud/debian-11` - - Environment variables: - - *TF_VAR_INSTANCE_IMAGE*: `debian-cloud/debian-11` - - Workflow: - * `_gcp_instance_image.#` - - Environment variables: - - *TF_VAR_INSTANCE_IMAGE*: `#` - - Workflow: - * `_ubuntu-2204-jammy-v20230114` - - Environment variables: - - *TF_VAR_INSTANCE_IMAGE*: `ubuntu-2204-jammy-v20230114` - - Workflow: - -
    - - - * Group "**gcp-instance-type**" -
    - Click here to expand this section. - - * `_f1-micro` - - Environment variables: - - *TF_VAR_INSTANCE_TYPE*: `f1-micro` - - Workflow: - * `_gcp_instance_type.#` - - Environment variables: - - *TF_VAR_INSTANCE_TYPE*: `#` - - Workflow: - * `_n1-highmem.#` - - Environment variables: - - *TF_VAR_INSTANCE_TYPE*: `n1-highmem-#` - - Workflow: - * `_n1-standard.#` - - Environment variables: - - *TF_VAR_INSTANCE_TYPE*: `n1-highmem-#` - - Workflow: - -
    - - - * Group "**gcp-project**" -
    - Click here to expand this section. - - * `_gcp_project.#` - - Environment variables: - - *TF_VAR_GCP_PROJECT*: `#` - - Workflow: - -
    - - - * Group "**instance-name**" -
    - Click here to expand this section. - - * `_instance_name.#` - - Environment variables: - - *TF_VAR_INSTANCE_NAME*: `#` - - Workflow: - -
    - - - * Group "**platform**" -
    - Click here to expand this section. - - * `_arm64` - - Environment variables: - - *CM_INSTANCE_PLATFORM*: `arm64` - - Workflow: - * **`_x86`** (default) - - Environment variables: - - *CM_INSTANCE_PLATFORM*: `x86` - - Workflow: - -
    - - - * Group "**region**" -
    - Click here to expand this section. - - * `_region.#` - - Environment variables: - - *TF_VAR_INSTANCE_REGION*: `#` - - Workflow: - * `_us-west-2` - - Environment variables: - - *TF_VAR_INSTANCE_REGION*: `us-west-2` - - Workflow: - -
    - - - * Group "**storage-size**" -
    - Click here to expand this section. - - * `_storage_size.#` - - Environment variables: - - *TF_VAR_DISK_GBS*: `#` - - Workflow: - * `_storage_size.8` - - Environment variables: - - *TF_VAR_DISK_GBS*: `8` - - Workflow: - -
    - - - * Group "**zone**" -
    - Click here to expand this section. - - * `_zone.#` - - Environment variables: - - *TF_VAR_INSTANCE_ZONE*: `#` - - Workflow: - -
    - - -#### Default variations - -`_aws,_x86` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--cminit=value` → `CM_TERRAFORM_CM_INIT=value` -* `--destroy=value` → `CM_DESTROY_TERRAFORM=value` -* `--gcp_credentials_json_file=value` → `CM_GCP_CREDENTIALS_JSON_PATH=value` -* `--key_file=value` → `CM_SSH_KEY_FILE=value` -* `--run_cmds=value` → `CM_TERRAFORM_RUN_COMMANDS=value` -* `--ssh_key_file=value` → `CM_SSH_KEY_FILE=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "cminit":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* TF_VAR_SECURITY_GROUP_ID: `sg-0783752c97d2e011d` -* TF_VAR_CPU_COUNT: `1` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-terraform/_cm.json)*** - * get,terraform - - CM script: [get-terraform](https://github.com/mlcommons/cm4mlops/tree/master/script/get-terraform) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-terraform/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-terraform/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-terraform/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-terraform/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-terraform/customize.py)*** - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-terraform/_cm.json)*** - * destroy,terraform - * Enable this dependency only if all ENV vars are set:
    -`{'CM_DESTROY_TERRAFORM': ['on']}` - * CM names: `--adr.['destroy-cmd']...` - - CM script: [destroy-terraform](https://github.com/mlcommons/cm4mlops/tree/master/script/destroy-terraform) - -___ -### Script output -`cmr "run terraform [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_TERRAFORM_CONFIG_DIR` -* `CM_TERRAFORM_RUN_DIR` -#### New environment keys auto-detected from customize - -* `CM_TERRAFORM_CONFIG_DIR` -* `CM_TERRAFORM_RUN_DIR` \ No newline at end of file diff --git a/docs/Collective-benchmarking/index.md b/docs/Collective-benchmarking/index.md new file mode 100644 index 000000000..56232c1d7 --- /dev/null +++ b/docs/Collective-benchmarking/index.md @@ -0,0 +1,3 @@ +The Collective benchmarking category contains the following scripts: + +- [launch-benchmark](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/launch-benchmark/README.md) diff --git a/docs/Collective-benchmarking/launch-benchmark/index.md b/docs/Collective-benchmarking/launch-benchmark/index.md deleted file mode 100644 index 84a904b86..000000000 --- a/docs/Collective-benchmarking/launch-benchmark/index.md +++ /dev/null @@ -1,116 +0,0 @@ -Automatically generated README for this automation recipe: **launch-benchmark** - -Category: **Collective benchmarking** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=launch-benchmark,5dc7662804bc4cad) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/launch-benchmark)* -* CM meta description for this script: 
*[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *launch,benchmark* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "launch benchmark" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=launch,benchmark` - -`cm run script --tags=launch,benchmark ` - -*or* - -`cmr "launch benchmark"` - -`cmr "launch benchmark " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'launch,benchmark' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="launch,benchmark"``` - -#### Run this script via Docker (beta) - -`cm docker script "launch benchmark" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/launch-benchmark/_cm.yaml) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/launch-benchmark/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/launch-benchmark/_cm.yaml) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/launch-benchmark/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/launch-benchmark/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/launch-benchmark/_cm.yaml) - -___ -### Script output -`cmr "launch benchmark " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/get-aocl/index.md b/docs/Compiler-automation/get-aocl/index.md deleted file mode 100644 index efb20e415..000000000 --- a/docs/Compiler-automation/get-aocl/index.md +++ /dev/null @@ -1,137 +0,0 @@ -Automatically generated README for this automation recipe: **get-aocl** - -Category: **Compiler automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-aocl,a65d3088f57d413d) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aocl)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,lib,aocl,amd-optimized,amd* -* Output cached? *true* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get lib aocl amd-optimized amd" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,lib,aocl,amd-optimized,amd` - -`cm run script --tags=get,lib,aocl,amd-optimized,amd ` - -*or* - -`cmr "get lib aocl amd-optimized amd"` - -`cmr "get lib aocl amd-optimized amd " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,lib,aocl,amd-optimized,amd' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,lib,aocl,amd-optimized,amd"``` - -#### Run this script via Docker (beta) - -`cm docker script "get lib aocl amd-optimized amd" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `4.0` - -* `4.0` -* `master` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aocl/_cm.json)*** - * get,generic,sys-util,_libmpfr-dev - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,generic-python-lib,_scons - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,git,_repo.https://github.com/amd/aocl-libm-ose - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aocl/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aocl/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aocl/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aocl/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aocl/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aocl/_cm.json) - -___ -### Script output -`cmr "get lib aocl amd-optimized amd " -j` -#### New environment keys (filter) - -* `+LD_LIBRARY_PATH` -* `+LIBRARY_PATH` -* `CM_AOCL_BUILD_PATH` -* `CM_AOCL_LIB_PATH` -* `CM_AOCL_SRC_PATH` -#### New environment keys auto-detected from customize - -* `CM_AOCL_BUILD_PATH` -* `CM_AOCL_LIB_PATH` -* `CM_AOCL_SRC_PATH` \ No newline at end of file diff --git a/docs/Compiler-automation/get-cl/index.md b/docs/Compiler-automation/get-cl/index.md deleted file mode 100644 index 6168f1345..000000000 --- a/docs/Compiler-automation/get-cl/index.md +++ /dev/null @@ -1,138 +0,0 @@ -Automatically generated README for this automation recipe: **get-cl** - -Category: **Compiler automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-cl,7dbb770faff947c0) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cl)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,cl,compiler,c-compiler,cpp-compiler,get-cl* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get cl compiler c-compiler cpp-compiler get-cl" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,cl,compiler,c-compiler,cpp-compiler,get-cl` - -`cm run script --tags=get,cl,compiler,c-compiler,cpp-compiler,get-cl ` - -*or* - -`cmr "get cl compiler c-compiler cpp-compiler get-cl"` - -`cmr "get cl compiler c-compiler cpp-compiler get-cl " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,cl,compiler,c-compiler,cpp-compiler,get-cl' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,cl,compiler,c-compiler,cpp-compiler,get-cl"``` - -#### Run this script via Docker (beta) - -`cm docker script "get cl compiler c-compiler cpp-compiler get-cl" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cl/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cl/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cl/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cl/run.bat) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cl/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cl/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cl/_cm.json) - -___ -### Script output -`cmr "get cl compiler c-compiler cpp-compiler get-cl " -j` -#### New environment keys (filter) - -* `+PATH` -* `CM_CL_*` -* `CM_COMPILER_*` -* `CM_CXX_COMPILER_*` -* `CM_C_COMPILER_*` -* `CM_LINKER_*` -#### New environment keys auto-detected from customize - -* `CM_CL_BIN` -* `CM_CL_BIN_WITH_PATH` -* `CM_CL_CACHE_TAGS` -* `CM_COMPILER_CACHE_TAGS` -* `CM_COMPILER_FAMILY` -* `CM_COMPILER_VERSION` -* `CM_CXX_COMPILER_BIN` -* `CM_CXX_COMPILER_FLAG_OUTPUT` -* `CM_CXX_COMPILER_FLAG_VERSION` -* `CM_CXX_COMPILER_WITH_PATH` -* `CM_C_COMPILER_BIN` -* `CM_C_COMPILER_FLAG_OUTPUT` -* `CM_C_COMPILER_FLAG_VERSION` -* `CM_C_COMPILER_WITH_PATH` \ No newline at end of file diff --git a/docs/Compiler-automation/get-compiler-flags/index.md b/docs/Compiler-automation/get-compiler-flags/index.md deleted file mode 100644 index b953d6388..000000000 --- a/docs/Compiler-automation/get-compiler-flags/index.md +++ /dev/null @@ -1,130 +0,0 @@ -Automatically generated README for this automation recipe: **get-compiler-flags** - -Category: 
**Compiler automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-compiler-flags,31be8b74a69742f8) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-flags)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,compiler-flags* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get compiler-flags" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,compiler-flags` - -`cm run script --tags=get,compiler-flags ` - -*or* - -`cmr "get compiler-flags"` - -`cmr "get compiler-flags " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,compiler-flags' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,compiler-flags"``` - -#### Run this script via Docker (beta) - -`cm docker script "get compiler-flags" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-flags/_cm.json)*** - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,compiler - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_C_COMPILER_BIN': ['on']}` - * CM names: `--adr.['compiler']...` - - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) - - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) - - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-flags/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-flags/_cm.json) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-flags/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-flags/_cm.json) - -___ -### Script output -`cmr "get compiler-flags " -j` -#### New environment keys (filter) - -* `+ CFLAGS` -* `+ CXXFLAGS` -* `+ FFLAGS` -* `+ LDFLAGS` -* `+CM_HOST_OS_DEFAULT_INCLUDE_PATH` -#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/get-compiler-rust/index.md b/docs/Compiler-automation/get-compiler-rust/index.md deleted file mode 100644 index f0692f37a..000000000 --- a/docs/Compiler-automation/get-compiler-rust/index.md +++ /dev/null @@ -1,120 +0,0 @@ -Automatically generated README for this automation recipe: **get-compiler-rust** - -Category: **Compiler automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-compiler-rust,97ffbd9e537b4b59) ]* - ---- -#### Summary - -* CM GitHub 
repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-rust)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *get,rust-compiler* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get rust-compiler" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,rust-compiler` - -`cm run script --tags=get,rust-compiler ` - -*or* - -`cmr "get rust-compiler"` - -`cmr "get rust-compiler " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,rust-compiler' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,rust-compiler"``` - -#### Run this script via Docker (beta) - -`cm docker script "get rust-compiler" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-rust/_cm.yaml)*** - * get,python3 - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-rust/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-rust/_cm.yaml) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-rust/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-rust/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-rust/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-compiler-rust/_cm.yaml) - -___ -### Script output -`cmr "get rust-compiler " -j` -#### New environment keys (filter) - -* `+PATH` -#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/get-gcc/index.md b/docs/Compiler-automation/get-gcc/index.md deleted file mode 100644 index 66bb2f11f..000000000 --- a/docs/Compiler-automation/get-gcc/index.md +++ /dev/null @@ -1,154 +0,0 @@ -Automatically generated README for this automation recipe: **get-gcc** - -Category: **Compiler automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-gcc,dbf4ab5cbed74372) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,gcc,compiler,c-compiler,cpp-compiler,get-gcc* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get gcc compiler c-compiler cpp-compiler get-gcc" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,gcc,compiler,c-compiler,cpp-compiler,get-gcc` - -`cm run script --tags=get,gcc,compiler,c-compiler,cpp-compiler,get-gcc ` - -*or* - -`cmr "get gcc compiler c-compiler cpp-compiler get-gcc"` - -`cmr "get gcc compiler c-compiler cpp-compiler get-gcc " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,gcc,compiler,c-compiler,cpp-compiler,get-gcc' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,gcc,compiler,c-compiler,cpp-compiler,get-gcc"``` - -#### Run this script via Docker (beta) - -`cm docker script "get gcc compiler c-compiler cpp-compiler get-gcc" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc/customize.py)*** - 1. 
***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-gcc/_cm.json)*** - * get,compiler-flags - - CM script: [get-compiler-flags](https://github.com/mlcommons/cm4mlops/tree/master/script/get-compiler-flags) - -___ -### Script output -`cmr "get gcc compiler c-compiler cpp-compiler get-gcc " -j` -#### New environment keys (filter) - -* `+ CFLAGS` -* `+ CXXFLAGS` -* `+ FFLAGS` -* `+ LDFLAGS` -* `+CM_HOST_OS_DEFAULT_INCLUDE_PATH` -* `+PATH` -* `CM_COMPILER_*` -* `CM_CXX_COMPILER_*` -* `CM_C_COMPILER_*` -* `CM_GCC_*` -* `CM_LINKER_*` -#### New environment keys auto-detected from customize - -* `CM_COMPILER_CACHE_TAGS` -* `CM_COMPILER_FAMILY` -* `CM_COMPILER_FLAGS_DEBUG` -* `CM_COMPILER_FLAGS_DEFAULT` -* `CM_COMPILER_FLAGS_FAST` -* `CM_COMPILER_VERSION` -* `CM_CXX_COMPILER_BIN` -* `CM_CXX_COMPILER_FLAG_OUTPUT` -* `CM_CXX_COMPILER_FLAG_VERSION` -* `CM_CXX_COMPILER_WITH_PATH` -* `CM_C_COMPILER_BIN` -* `CM_C_COMPILER_FLAG_OUTPUT` -* `CM_C_COMPILER_FLAG_VERSION` -* `CM_C_COMPILER_WITH_PATH` -* `CM_GCC_BIN` -* `CM_GCC_CACHE_TAGS` -* `CM_GCC_INSTALLED_PATH` -* `CM_LINKER_FLAGS_DEBUG` -* `CM_LINKER_FLAGS_DEFAULT` -* `CM_LINKER_FLAGS_FAST` \ No newline at end of file diff --git a/docs/Compiler-automation/get-go/index.md b/docs/Compiler-automation/get-go/index.md deleted file mode 100644 index 963eca057..000000000 --- a/docs/Compiler-automation/get-go/index.md +++ /dev/null @@ -1,126 +0,0 @@ -Automatically generated README for this automation recipe: **get-go** - -Category: **Compiler automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-go,ab42647a96724a25) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-go)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,tool,go,get-go* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get tool go get-go" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,tool,go,get-go` - -`cm run script --tags=get,tool,go,get-go ` - -*or* - -`cmr "get tool go get-go"` - -`cmr "get tool go get-go " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,tool,go,get-go' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,tool,go,get-go"``` - -#### Run this script via Docker (beta) - -`cm docker script "get tool go get-go" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-go/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-go/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-go/_cm.json)*** - * install,go - * Enable this dependency only if all ENV vars are set:
    -`{'CM_REQUIRE_INSTALL': ['yes']}` - - *Warning: no scripts found* - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-go/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-go/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-go/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-go/_cm.json) - -___ -### Script output -`cmr "get tool go get-go " -j` -#### New environment keys (filter) - -* `+PATH` -* `CM_GO_*` -#### New environment keys auto-detected from customize - -* `CM_GO_CACHE_TAGS` -* `CM_GO_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/Compiler-automation/get-llvm/index.md b/docs/Compiler-automation/get-llvm/index.md deleted file mode 100644 index 8615f3831..000000000 --- a/docs/Compiler-automation/get-llvm/index.md +++ /dev/null @@ -1,175 +0,0 @@ -Automatically generated README for this automation recipe: **get-llvm** - -Category: **Compiler automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-llvm,99832a103ed04eb8) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,llvm,compiler,c-compiler,cpp-compiler,get-llvm* -* Output 
cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get llvm compiler c-compiler cpp-compiler get-llvm" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,llvm,compiler,c-compiler,cpp-compiler,get-llvm` - -`cm run script --tags=get,llvm,compiler,c-compiler,cpp-compiler,get-llvm[,variations] ` - -*or* - -`cmr "get llvm compiler c-compiler cpp-compiler get-llvm"` - -`cmr "get llvm compiler c-compiler cpp-compiler get-llvm [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,llvm,compiler,c-compiler,cpp-compiler,get-llvm' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,llvm,compiler,c-compiler,cpp-compiler,get-llvm"``` - -#### Run this script via Docker (beta) - -`cm docker script "get llvm compiler c-compiler cpp-compiler get-llvm[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_from-prebuilt` - - Workflow: - * `_from-src` - - Workflow: - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm/_cm.json)*** - * install,llvm - * Enable this dependency only if all ENV vars are set:
    -`{'CM_REQUIRE_INSTALL': ['yes']}` - * CM names: `--adr.llvm-install...` - - CM script: [install-llvm-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-prebuilt) - - CM script: [install-llvm-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-src) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm/customize.py)*** - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-llvm/_cm.json)*** - * get,compiler-flags - - CM script: [get-compiler-flags](https://github.com/mlcommons/cm4mlops/tree/master/script/get-compiler-flags) - -___ -### Script output -`cmr "get llvm compiler c-compiler cpp-compiler get-llvm [,variations]" -j` -#### New environment keys (filter) - -* `+ CFLAGS` -* `+ CXXFLAGS` -* `+ FFLAGS` -* `+ LDFLAGS` -* `+CM_HOST_OS_DEFAULT_INCLUDE_PATH` -* `+PATH` -* `CM_COMPILER_*` -* `CM_CXX_COMPILER_*` -* `CM_C_COMPILER_*` -* `CM_LINKER_*` -* `CM_LLVM_*` -#### New environment keys auto-detected from customize - -* `CM_COMPILER_CACHE_TAGS` -* `CM_COMPILER_FAMILY` -* `CM_COMPILER_FLAGS_DEBUG` -* `CM_COMPILER_FLAGS_DEFAULT` -* `CM_COMPILER_FLAGS_FAST` -* `CM_COMPILER_VERSION` -* `CM_CXX_COMPILER_BIN` -* `CM_CXX_COMPILER_FLAG_INCLUDE` -* `CM_CXX_COMPILER_FLAG_OUTPUT` -* `CM_CXX_COMPILER_FLAG_VERSION` -* `CM_CXX_COMPILER_WITH_PATH` -* `CM_C_COMPILER_BIN` -* `CM_C_COMPILER_FLAG_INCLUDE` -* `CM_C_COMPILER_FLAG_OUTPUT` -* `CM_C_COMPILER_FLAG_VERSION` -* `CM_C_COMPILER_WITH_PATH` -* `CM_LINKER_FLAGS_DEBUG` -* `CM_LINKER_FLAGS_DEFAULT` -* 
`CM_LINKER_FLAGS_FAST` -* `CM_LLVM_CLANG_BIN` -* `CM_LLVM_CLANG_CACHE_TAGS` \ No newline at end of file diff --git a/docs/Compiler-automation/index.md b/docs/Compiler-automation/index.md new file mode 100644 index 000000000..656f68794 --- /dev/null +++ b/docs/Compiler-automation/index.md @@ -0,0 +1,20 @@ +The Compiler automation category contains the following scripts: + +- [get-aocl](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-aocl/README.md) +- [get-cl](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-cl/README.md) +- [get-compiler-flags](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-compiler-flags/README.md) +- [get-compiler-rust](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-compiler-rust/README.md) +- [get-gcc](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-gcc/README.md) +- [get-go](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-go/README.md) +- [get-llvm](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-llvm/README.md) +- [install-gcc-src](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-gcc-src/README.md) +- [install-ipex-from-src](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-ipex-from-src/README.md) +- [install-llvm-prebuilt](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-llvm-prebuilt/README.md) +- [install-llvm-src](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-llvm-src/README.md) +- [install-onednn-from-src](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-onednn-from-src/README.md) +- [install-onnxruntime-from-src](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-onnxruntime-from-src/README.md) +- 
[install-pytorch-from-src](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-pytorch-from-src/README.md) +- [install-pytorch-kineto-from-src](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-pytorch-kineto-from-src/README.md) +- [install-torchvision-from-src](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-torchvision-from-src/README.md) +- [install-tpp-pytorch-extension](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-tpp-pytorch-extension/README.md) +- [install-transformers-from-src](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-transformers-from-src/README.md) diff --git a/docs/Compiler-automation/install-gcc-src/index.md b/docs/Compiler-automation/install-gcc-src/index.md deleted file mode 100644 index a4fa68a1c..000000000 --- a/docs/Compiler-automation/install-gcc-src/index.md +++ /dev/null @@ -1,127 +0,0 @@ -Automatically generated README for this automation recipe: **install-gcc-src** - -Category: **Compiler automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-gcc-src,faae0ebd6e1242db) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gcc-src)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *install,src,gcc,src-gcc* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install src gcc src-gcc" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,src,gcc,src-gcc` - -`cm run script --tags=install,src,gcc,src-gcc ` - -*or* - -`cmr "install src gcc src-gcc"` - -`cmr "install src gcc src-gcc " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'install,src,gcc,src-gcc' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,src,gcc,src-gcc"``` - -#### Run this script via Docker (beta) - -`cm docker script "install src gcc src-gcc" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `12` - -* `master` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gcc-src/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gcc-src/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gcc-src/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gcc-src/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gcc-src/_cm.json) - 1. Run "postrocess" function from customize.py - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gcc-src/_cm.json)*** - * get,gcc - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_REQUIRE_INSTALL': ['yes']}` - - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) - -___ -### Script output -`cmr "install src gcc src-gcc " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/install-ipex-from-src/index.md b/docs/Compiler-automation/install-ipex-from-src/index.md deleted file mode 100644 index 9762178c4..000000000 --- a/docs/Compiler-automation/install-ipex-from-src/index.md +++ /dev/null @@ -1,198 +0,0 @@ -Automatically generated README for this automation recipe: **install-ipex-from-src** - -Category: **Compiler automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-ipex-from-src,09364fff2bf04516) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-ipex-from-src)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *install,get,src,from.src,ipex,src-ipex* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install get src from.src ipex src-ipex" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,get,src,from.src,ipex,src-ipex` - -`cm run script --tags=install,get,src,from.src,ipex,src-ipex[,variations] ` - -*or* - -`cmr "install get src from.src ipex src-ipex"` - -`cmr "install get src from.src ipex src-ipex [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'install,get,src,from.src,ipex,src-ipex' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,get,src,from.src,ipex,src-ipex"``` - -#### Run this script via Docker (beta) - -`cm docker script "install get src from.src ipex src-ipex[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_branch.#` - - Environment variables: - - *CM_GIT_CHECKOUT*: `#` - - Workflow: - * `_for-intel-mlperf-inference-v3.1-gptj` - - Environment variables: - - *CM_CONDA_ENV*: `yes` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,conda,_name.gptj-pt - * CM names: `--adr.['conda']...` - - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) - * get,generic,conda-package,_package.python - * CM names: `--adr.['conda-package', 'python3']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.wheel,_source.conda-forge - * CM names: `--adr.['conda-package', 'wheel']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.setuptools,_source.conda-forge - * CM names: `--adr.['conda-package', 'setuptools']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * install,llvm,src,_for-intel-mlperf-inference-v3.1-gptj - - CM script: [install-llvm-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-src) - * `_sha.#` - - Environment variables: - - *CM_GIT_CHECKOUT_SHA*: `#` - - Workflow: - * `_tag.#` - - Environment variables: - - *CM_GIT_CHECKOUT_TAG*: `#` - - Workflow: - -
    - - - * Group "**repo**" -
    - Click here to expand this section. - - * `_repo.#` - - Environment variables: - - *CM_GIT_URL*: `#` - - Workflow: - * **`_repo.https://github.com/intel/intel-extension-for-pytorch`** (default) - - Environment variables: - - *CM_GIT_URL*: `https://github.com/intel/intel-extension-for-pytorch` - - Workflow: - -
    - - -#### Default variations - -`_repo.https://github.com/intel/intel-extension-for-pytorch` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-ipex-from-src/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,python3 - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_CONDA_ENV': ['yes']}` - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,pytorch,from.src - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_CONDA_ENV': ['yes']}` - * CM names: `--adr.['pytorch']...` - - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src) - * get,git,repo - * CM names: `--adr.['ipex-src-repo']...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-ipex-from-src/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-ipex-from-src/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-ipex-from-src/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-ipex-from-src/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-ipex-from-src/_cm.json) - -___ -### Script output -`cmr "install get src from.src ipex src-ipex [,variations]" -j` -#### New environment keys (filter) - -* `CM_IPEX_*` -#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/install-llvm-prebuilt/index.md b/docs/Compiler-automation/install-llvm-prebuilt/index.md deleted file mode 100644 index 6d338b092..000000000 --- a/docs/Compiler-automation/install-llvm-prebuilt/index.md +++ /dev/null @@ -1,137 +0,0 @@ -Automatically generated README for this automation recipe: **install-llvm-prebuilt** - -Category: **Compiler automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=install-llvm-prebuilt,cda9094971724a0a) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *install,prebuilt,llvm,prebuilt-llvm,install-prebuilt-llvm* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install prebuilt llvm prebuilt-llvm install-prebuilt-llvm" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,prebuilt,llvm,prebuilt-llvm,install-prebuilt-llvm` - -`cm run script --tags=install,prebuilt,llvm,prebuilt-llvm,install-prebuilt-llvm ` - -*or* - -`cmr "install prebuilt llvm prebuilt-llvm install-prebuilt-llvm"` - -`cmr "install prebuilt llvm prebuilt-llvm install-prebuilt-llvm " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'install,prebuilt,llvm,prebuilt-llvm,install-prebuilt-llvm' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,prebuilt,llvm,prebuilt-llvm,install-prebuilt-llvm"``` - -#### Run this script via Docker (beta) - -`cm docker script "install prebuilt llvm prebuilt-llvm install-prebuilt-llvm" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `15.0.6` - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt/customize.py)*** - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-prebuilt/_cm.json)*** - * get,llvm - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_REQUIRE_INSTALL': ['yes']}` - - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) - -___ -### Script output -`cmr "install prebuilt llvm prebuilt-llvm install-prebuilt-llvm " -j` -#### New environment keys (filter) - -* `+CPLUS_INCLUDE_PATH` -* `+C_INCLUDE_PATH` -* `+LD_LIBRARY_PATH` -* `+PATH` -* `CM_COMPILER_NAME` -* `CM_LLVM_*` -#### New environment keys auto-detected from customize - -* `CM_LLVM_CLANG_BIN_WITH_PATH` -* `CM_LLVM_INSTALLED_PATH` -* `CM_LLVM_PACKAGE` \ No newline at end of file diff --git a/docs/Compiler-automation/install-llvm-src/index.md b/docs/Compiler-automation/install-llvm-src/index.md deleted file mode 100644 index 331fbea92..000000000 --- a/docs/Compiler-automation/install-llvm-src/index.md +++ /dev/null @@ -1,292 +0,0 @@ -Automatically generated README for this automation recipe: **install-llvm-src** - -Category: **Compiler automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-llvm-src,2af16e9a6c5f4702) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-src)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *install,src,llvm,from.src,src-llvm* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install src llvm from.src src-llvm" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,src,llvm,from.src,src-llvm` - -`cm run script --tags=install,src,llvm,from.src,src-llvm[,variations] ` - -*or* - -`cmr "install src llvm from.src src-llvm"` - -`cmr "install src llvm from.src src-llvm [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'install,src,llvm,from.src,src-llvm' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,src,llvm,from.src,src-llvm"``` - -#### Run this script via Docker (beta) - -`cm docker script "install src llvm from.src src-llvm[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_branch.#` - - Environment variables: - - *CM_GIT_CHECKOUT*: `#` - - Workflow: - * `_for-intel-mlperf-inference-v3.1-bert` - - Environment variables: - - *CM_LLVM_CONDA_ENV*: `yes` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,gcc - - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) - * get,conda,_name.bert-pt - * CM names: `--adr.['conda']...` - - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) - * get,conda-package,_package.ncurses,_source.conda-forge - * CM names: `--adr.['conda-package', 'ncurses']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.ninja - * CM names: `--adr.['conda-package', 'ninja']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.cmake - * CM names: `--adr.['conda-package', 'cmake']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,conda-package,_package.llvm-openmp,_source.conda-forge - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,conda-package,_package.chardet - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.libstdcxx-ng,_source.conda-forge - * CM names: `--adr.['conda-package', 'libstdcxx-ng']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * `_for-intel-mlperf-inference-v3.1-gptj` - - Environment variables: - - *CM_LLVM_CONDA_ENV*: `yes` 
- - *CM_LLVM_16_INTEL_MLPERF_INFERENCE*: `yes` - - *USE_CUDA*: `0` - - *CUDA_VISIBLE_DEVICES*: `` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-sys-util,_g++-12 - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,gcc - - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) - * get,conda,_name.gptj-pt - * CM names: `--adr.['conda']...` - - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) - * get,generic,conda-package,_package.python - * CM names: `--adr.['conda-package', 'python']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,conda-package,_package.ncurses,_source.conda-forge - * CM names: `--adr.['conda-package', 'ncurses']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,conda-package,_package.chardet - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.libstdcxx-ng,_source.conda-forge - * CM names: `--adr.['conda-package', 'libstdcxx-ng']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.mkl,_source.intel - * CM names: `--adr.['conda-package', 'mkl']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.mkl-include,_source.intel - * CM names: `--adr.['conda-package', 'mkl-include']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * 
get,generic,conda-package,_package.intel-openmp,_source.intel - * CM names: `--adr.['conda-package', 'intel-openmp']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.gperftools,_source.conda-forge - * CM names: `--adr.['conda-package', 'gperftools']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.pybind11,_source.conda-forge - * CM names: `--adr.['conda-package', 'pybind11']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic-python-lib,_custom-python,_package.torch,_url.git+https://github.com/pytorch/pytorch.git@927dc662386af052018212c7d01309a506fc94cd - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_custom-python,_package.setuptools - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_custom-python,_package.neural-compressor,_url.git+https://github.com/intel/neural-compressor.git@a2931eaa4052eec195be3c79a13f7bfa23e54473 - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_full-history` - - Workflow: - * `_runtimes.#` - - Environment variables: - - *CM_LLVM_ENABLE_RUNTIMES*: `#` - - Workflow: - * `_sha.#` - - Environment variables: - - *CM_GIT_CHECKOUT_SHA*: `#` - - Workflow: - * `_tag.#` - - Environment variables: - - *CM_GIT_CHECKOUT_TAG*: `#` - - Workflow: - -
    - - - * Group "**build-type**" -
    - Click here to expand this section. - - * `_debug` - - Environment variables: - - *CM_LLVM_BUILD_TYPE*: `debug` - - Workflow: - * **`_release`** (default) - - Environment variables: - - *CM_LLVM_BUILD_TYPE*: `release` - - Workflow: - -
    - - - * Group "**clang**" -
    - Click here to expand this section. - - * **`_clang`** (default) - - Environment variables: - - *CM_LLVM_ENABLE_PROJECTS*: `clang` - - Workflow: - -
    - - - * Group "**repo**" -
    - Click here to expand this section. - - * `_repo.#` - - Environment variables: - - *CM_GIT_URL*: `#` - - Workflow: - -
    - - -#### Default variations - -`_clang,_release` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-src/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,cmake - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_LLVM_CONDA_ENV': ['yes']}` - - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) - * get,generic-sys-util,_ninja-build - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_LLVM_CONDA_ENV': ['yes']}` - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,git,repo - * CM names: `--adr.['llvm-src-repo']...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-src/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-src/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-src/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-src/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-src/customize.py)*** - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-llvm-src/_cm.json)*** - * get,llvm - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_REQUIRE_INSTALL': ['yes']}` - - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) - -___ -### Script output -`cmr "install src llvm from.src src-llvm [,variations]" -j` -#### New environment keys (filter) - -* `+C_INCLUDE_PATH` -* `+PATH` -* `CM_GET_DEPENDENT_CACHED_PATH` -* `CM_LLVM_*` -#### New environment keys auto-detected from customize - -* `CM_GET_DEPENDENT_CACHED_PATH` -* `CM_LLVM_CLANG_BIN_WITH_PATH` -* `CM_LLVM_CMAKE_CMD` -* `CM_LLVM_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/Compiler-automation/install-onednn-from-src/index.md b/docs/Compiler-automation/install-onednn-from-src/index.md deleted file mode 100644 index 3d9232a76..000000000 --- a/docs/Compiler-automation/install-onednn-from-src/index.md +++ /dev/null @@ -1,181 +0,0 @@ -Automatically generated README for this automation recipe: **install-onednn-from-src** - -Category: **Compiler automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-onednn-from-src,fe3a652e315f4c8f) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onednn-from-src)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *install,get,src,from.src,onednn,src-onednn* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install get src from.src onednn src-onednn" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,get,src,from.src,onednn,src-onednn` - -`cm run script --tags=install,get,src,from.src,onednn,src-onednn[,variations] ` - -*or* - -`cmr "install get src from.src onednn src-onednn"` - -`cmr "install get src from.src onednn src-onednn [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'install,get,src,from.src,onednn,src-onednn' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,get,src,from.src,onednn,src-onednn"``` - -#### Run this script via Docker (beta) - -`cm docker script "install get src from.src onednn src-onednn[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_branch.#` - - Environment variables: - - *CM_GIT_CHECKOUT*: `#` - - Workflow: - * `_for-intel-mlperf-inference-v3.1-bert` - - Environment variables: - - *CM_CONDA_ENV*: `yes` - - *CM_FOR_INTEL_MLPERF_INFERENCE*: `yes` - - Workflow: - * `_sha.#` - - Environment variables: - - *CM_GIT_CHECKOUT_SHA*: `#` - - Workflow: - * `_tag.#` - - Environment variables: - - *CM_GIT_CHECKOUT_TAG*: `#` - - Workflow: - -
    - - - * Group "**repo**" -
    - Click here to expand this section. - - * `_repo.#` - - Environment variables: - - *CM_GIT_URL*: `#` - - Workflow: - * **`_repo.https://github.com/oneapi-src/oneDNN`** (default) - - Environment variables: - - *CM_GIT_URL*: `https://github.com/oneapi-src/oneDNN` - - Workflow: - -
    - - -#### Default variations - -`_repo.https://github.com/oneapi-src/oneDNN` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onednn-from-src/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,python3 - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_CONDA_ENV': ['yes']}` - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,git,repo - * CM names: `--adr.['onednn-src-repo']...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onednn-from-src/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onednn-from-src/_cm.json) - 1. ***Run native script if exists*** - * [run-intel-mlperf-inference.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onednn-from-src/run-intel-mlperf-inference.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onednn-from-src/_cm.json) - 1. Run "postrocess" function from customize.py - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onednn-from-src/_cm.json) - -___ -### Script output -`cmr "install get src from.src onednn src-onednn [,variations]" -j` -#### New environment keys (filter) - -* `CM_ONEDNN_*` -#### New environment keys auto-detected from customize - -* `CM_ONEDNN_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/Compiler-automation/install-onnxruntime-from-src/index.md b/docs/Compiler-automation/install-onnxruntime-from-src/index.md deleted file mode 100644 index e99fa55e7..000000000 --- a/docs/Compiler-automation/install-onnxruntime-from-src/index.md +++ /dev/null @@ -1,184 +0,0 @@ -Automatically generated README for this automation recipe: **install-onnxruntime-from-src** - -Category: **Compiler automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-onnxruntime-from-src,9798c7e7a5944cee) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onnxruntime-from-src)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *install,get,src,from.src,onnxruntime,src-onnxruntime* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install get src from.src onnxruntime src-onnxruntime" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,get,src,from.src,onnxruntime,src-onnxruntime` - -`cm run script --tags=install,get,src,from.src,onnxruntime,src-onnxruntime[,variations] ` - -*or* - -`cmr "install get src from.src onnxruntime src-onnxruntime"` - -`cmr "install get src from.src onnxruntime src-onnxruntime [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'install,get,src,from.src,onnxruntime,src-onnxruntime' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,get,src,from.src,onnxruntime,src-onnxruntime"``` - -#### Run this script via Docker (beta) - -`cm docker script "install get src from.src onnxruntime src-onnxruntime[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_branch.#` - - Environment variables: - - *CM_GIT_CHECKOUT*: `#` - - Workflow: - * `_cuda` - - Environment variables: - - *CM_ONNXRUNTIME_GPU*: `yes` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,cuda,_cudnn - * CM names: `--adr.['cuda']...` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * `_sha.#` - - Environment variables: - - *CM_GIT_CHECKOUT_SHA*: `#` - - Workflow: - * `_tag.#` - - Environment variables: - - *CM_GIT_CHECKOUT_TAG*: `#` - - Workflow: - -
    - - - * Group "**repo**" -
    - Click here to expand this section. - - * **`_repo.https://github.com/Microsoft/onnxruntime`** (default) - - Environment variables: - - *CM_GIT_URL*: `https://github.com/Microsoft/onnxruntime` - - Workflow: - -
    - - -#### Default variations - -`_repo.https://github.com/Microsoft/onnxruntime` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onnxruntime-from-src/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * fail,filter,_windows - - CM script: [fail](https://github.com/mlcommons/cm4mlops/tree/master/script/fail) - * get,python3 - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_CONDA_ENV': ['yes']}` - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,cmake - - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) - * get,gcc - - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) - * get,git,repo - * CM names: `--adr.['onnxruntime-src-repo']...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onnxruntime-from-src/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onnxruntime-from-src/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onnxruntime-from-src/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onnxruntime-from-src/_cm.json) - 1. Run "postrocess" function from customize.py - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-onnxruntime-from-src/_cm.json) - -___ -### Script output -`cmr "install get src from.src onnxruntime src-onnxruntime [,variations]" -j` -#### New environment keys (filter) - -* `CM_ONNXRUNTIME_*` -#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/install-pytorch-from-src/index.md b/docs/Compiler-automation/install-pytorch-from-src/index.md deleted file mode 100644 index c63c6219f..000000000 --- a/docs/Compiler-automation/install-pytorch-from-src/index.md +++ /dev/null @@ -1,248 +0,0 @@ -Automatically generated README for this automation recipe: **install-pytorch-from-src** - -Category: **Compiler automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-pytorch-from-src,64eaf3e81de94f41) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-from-src)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *install,get,src,from.src,pytorch,src-pytorch* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install get src from.src pytorch src-pytorch" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,get,src,from.src,pytorch,src-pytorch` - -`cm run script --tags=install,get,src,from.src,pytorch,src-pytorch[,variations] ` - -*or* - -`cmr "install get src from.src pytorch src-pytorch"` - -`cmr "install get src from.src pytorch src-pytorch [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'install,get,src,from.src,pytorch,src-pytorch' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,get,src,from.src,pytorch,src-pytorch"``` - -#### Run this script via Docker (beta) - -`cm docker script "install get src from.src pytorch src-pytorch[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_branch.#` - - Environment variables: - - *CM_GIT_CHECKOUT*: `#` - - Workflow: - * `_cuda` - - Environment variables: - - *CUDA_HOME*: `<<>>` - - *CUDNN_LIBRARY_PATH*: `<<>>` - - *CUDNN_INCLUDE_PATH*: `<<>>` - - *CUDA_NVCC_EXECUTABLE*: `<<>>` - - *USE_CUDA*: `1` - - *USE_CUDNN*: `1` - - *TORCH_CUDA_ARCH_LIST*: `Ampere Ada Hopper` - - *TORCH_CXX_FLAGS*: `-D_GLIBCXX_USE_CXX11_ABI=1` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,cuda,_cudnn - * CM names: `--adr.['cuda']...` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * `_for-intel-mlperf-inference-v3.1-bert` - - Environment variables: - - *CM_CONDA_ENV*: `yes` - - *CM_MLPERF_INFERENCE_INTEL*: `yes` - - *USE_CUDA*: `0` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-sys-util,_libffi7 - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,conda,_name.bert-pt - * CM names: `--adr.['conda']...` - - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) - * get,generic,conda-package,_package.ncurses,_source.conda-forge - * CM names: `--adr.['conda-package', 'ncurses']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.python - * CM names: `--adr.['conda-package', 'python3']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * install,llvm,src,_tag.llvmorg-15.0.7,_runtimes.libcxx:libcxxabi:openmp,_clang,_release,_for-intel-mlperf-inference-v3.1-bert - - CM script: [install-llvm-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-src) - * get,generic,conda-package,_package.ninja - * CM names: `--adr.['conda-package', 'ninja']...` - - CM script: 
[install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.cmake - * CM names: `--adr.['conda-package', 'cmake']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.mkl,_source.intel - * CM names: `--adr.['conda-package', 'mkl']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.mkl-include,_source.intel - * CM names: `--adr.['conda-package', 'mkl-include']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.intel-openmp,_source.intel - * CM names: `--adr.['conda-package', 'intel-openmp']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.llvm-openmp,_source.conda-forge - * CM names: `--adr.['conda-package', 'llvm-openmp']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.jemalloc,_source.conda-forge - * CM names: `--adr.['conda-package', 'jemalloc']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.wheel,_source.conda-forge - * CM names: `--adr.['conda-package', 'wheel']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.setuptools,_source.conda-forge - * CM names: `--adr.['conda-package', 
'setuptools']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.future,_source.conda-forge - * CM names: `--adr.['conda-package', 'future']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.libstdcxx-ng,_source.conda-forge - * CM names: `--adr.['conda-package', 'libstdcxx-ng']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * `_for-nvidia-mlperf-inference-v3.1` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,cmake - - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) - * `_sha.#` - - Environment variables: - - *CM_GIT_CHECKOUT_SHA*: `#` - - Workflow: - * `_tag.#` - - Environment variables: - - *CM_GIT_CHECKOUT_TAG*: `#` - - Workflow: - -
    - - - * Group "**repo**" -
    - Click here to expand this section. - - * `_repo.#` - - Environment variables: - - *CM_GIT_URL*: `#` - - Workflow: - * **`_repo.https://github.com/pytorch/pytorch`** (default) - - Environment variables: - - *CM_GIT_URL*: `https://github.com/pytorch/pytorch` - - Workflow: - -
    - - -#### Default variations - -`_repo.https://github.com/pytorch/pytorch` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-from-src/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,python3 - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_CONDA_ENV': ['yes']}` - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,git,repo - * CM names: `--adr.['pytorch-src-repo']...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-from-src/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-from-src/_cm.json) - 1. ***Run native script if exists*** - * [run-intel-mlperf-inference-v3_1.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-from-src/run-intel-mlperf-inference-v3_1.sh) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-from-src/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-from-src/_cm.json) - 1. Run "postrocess" function from customize.py - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-from-src/_cm.json) - -___ -### Script output -`cmr "install get src from.src pytorch src-pytorch [,variations]" -j` -#### New environment keys (filter) - -* `CM_PYTORCH_*` -#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/install-pytorch-kineto-from-src/index.md b/docs/Compiler-automation/install-pytorch-kineto-from-src/index.md deleted file mode 100644 index 5b38ea6ed..000000000 --- a/docs/Compiler-automation/install-pytorch-kineto-from-src/index.md +++ /dev/null @@ -1,191 +0,0 @@ -Automatically generated README for this automation recipe: **install-pytorch-kineto-from-src** - -Category: **Compiler automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-pytorch-kineto-from-src,98a4b061712d4483) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-kineto-from-src)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *install,get,src,from.src,pytorch-kineto,kineto,src-pytorch-kineto* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install get src from.src pytorch-kineto kineto src-pytorch-kineto" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,get,src,from.src,pytorch-kineto,kineto,src-pytorch-kineto` - -`cm run script --tags=install,get,src,from.src,pytorch-kineto,kineto,src-pytorch-kineto[,variations] ` - -*or* - -`cmr "install get src from.src pytorch-kineto kineto src-pytorch-kineto"` - -`cmr "install get src from.src pytorch-kineto kineto src-pytorch-kineto [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'install,get,src,from.src,pytorch-kineto,kineto,src-pytorch-kineto' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,get,src,from.src,pytorch-kineto,kineto,src-pytorch-kineto"``` - -#### Run this script via Docker (beta) - -`cm docker script "install get src from.src pytorch-kineto kineto src-pytorch-kineto[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_branch.#` - - Environment variables: - - *CM_GIT_CHECKOUT*: `#` - - Workflow: - * `_cuda` - - Environment variables: - - *CUDA_HOME*: `<<>>` - - *CUDA_NVCC_EXECUTABLE*: `<<>>` - - *CUDNN_INCLUDE_PATH*: `<<>>` - - *CUDNN_LIBRARY_PATH*: `<<>>` - - *TORCH_CUDA_ARCH_LIST*: `Ampere Ada Hopper` - - *TORCH_CXX_FLAGS*: `-D_GLIBCXX_USE_CXX11_ABI=1` - - *USE_CUDA*: `1` - - *USE_CUDNN*: `1` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,cuda,_cudnn - * CM names: `--adr.['cuda']...` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * `_sha.#` - - Environment variables: - - *CM_GIT_CHECKOUT_SHA*: `#` - - Workflow: - * `_tag.#` - - Environment variables: - - *CM_GIT_CHECKOUT_TAG*: `#` - - Workflow: - -
    - - - * Group "**repo**" -
    - Click here to expand this section. - - * `_repo.#` - - Environment variables: - - *CM_GIT_URL*: `#` - - Workflow: - * **`_repo.https://github.com/pytorch/kineto`** (default) - - Environment variables: - - *CM_GIT_URL*: `https://github.com/pytorch/kineto` - - Workflow: - -
    - - -#### Default variations - -`_repo.https://github.com/pytorch/kineto` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-kineto-from-src/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,python3 - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_CONDA_ENV': ['yes']}` - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,cmake - - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) - * get,git,repo - * CM names: `--adr.['pytorch-kineto-src-repo']...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-kineto-from-src/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-kineto-from-src/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-kineto-from-src/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-kineto-from-src/_cm.json) - 1. Run "postrocess" function from customize.py - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-pytorch-kineto-from-src/_cm.json) - -___ -### Script output -`cmr "install get src from.src pytorch-kineto kineto src-pytorch-kineto [,variations]" -j` -#### New environment keys (filter) - -* `CM_PYTORCH_KINETO_*` -#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/install-torchvision-from-src/index.md b/docs/Compiler-automation/install-torchvision-from-src/index.md deleted file mode 100644 index c269b624b..000000000 --- a/docs/Compiler-automation/install-torchvision-from-src/index.md +++ /dev/null @@ -1,194 +0,0 @@ -Automatically generated README for this automation recipe: **install-torchvision-from-src** - -Category: **Compiler automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-torchvision-from-src,68b855780d474546) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-torchvision-from-src)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *install,get,src,from.src,pytorchvision,torchvision,src-pytorchvision* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install get src from.src pytorchvision torchvision src-pytorchvision" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,get,src,from.src,pytorchvision,torchvision,src-pytorchvision` - -`cm run script --tags=install,get,src,from.src,pytorchvision,torchvision,src-pytorchvision[,variations] ` - -*or* - -`cmr "install get src from.src pytorchvision torchvision src-pytorchvision"` - -`cmr "install get src from.src pytorchvision torchvision src-pytorchvision [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'install,get,src,from.src,pytorchvision,torchvision,src-pytorchvision' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,get,src,from.src,pytorchvision,torchvision,src-pytorchvision"``` - -#### Run this script via Docker (beta) - -`cm docker script "install get src from.src pytorchvision torchvision src-pytorchvision[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_branch.#` - - Environment variables: - - *CM_GIT_CHECKOUT*: `#` - - Workflow: - * `_cuda` - - Environment variables: - - *CUDA_HOME*: `<<>>` - - *CUDA_NVCC_EXECUTABLE*: `<<>>` - - *CUDNN_INCLUDE_PATH*: `<<>>` - - *CUDNN_LIBRARY_PATH*: `<<>>` - - *USE_CUDA*: `1` - - *USE_CUDNN*: `1` - - *TORCH_CUDA_ARCH_LIST*: `Ampere Ada Hopper` - - *TORCH_CXX_FLAGS*: `-D_GLIBCXX_USE_CXX11_ABI=1` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,cuda,_cudnn - * CM names: `--adr.['cuda']...` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * `_for-nvidia-mlperf-inference-v3.1` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * install,pytorch,from.src,_for-nvidia-mlperf-inference-v3.1 - - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src) - * `_sha.#` - - Environment variables: - - *CM_GIT_CHECKOUT_SHA*: `#` - - Workflow: - * `_tag.#` - - Environment variables: - - *CM_GIT_CHECKOUT_TAG*: `#` - - Workflow: - -
    - - - * Group "**repo**" -
    - Click here to expand this section. - - * `_repo.#` - - Environment variables: - - *CM_GIT_URL*: `#` - - Workflow: - * **`_repo.https://github.com/pytorch/vision`** (default) - - Environment variables: - - *CM_GIT_URL*: `https://github.com/pytorch/vision` - - Workflow: - -
    - - -#### Default variations - -`_repo.https://github.com/pytorch/vision` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-torchvision-from-src/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,python3 - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_CONDA_ENV': ['yes']}` - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,git,repo - * CM names: `--adr.['pytorchision-src-repo', 'torchision-src-repo']...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-torchvision-from-src/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-torchvision-from-src/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-torchvision-from-src/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-torchvision-from-src/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-torchvision-from-src/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-torchvision-from-src/_cm.json) - -___ -### Script output -`cmr "install get src from.src pytorchvision torchvision src-pytorchvision [,variations]" -j` -#### New environment keys (filter) - -* `CM_PYTORCHVISION_*` -#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/install-tpp-pytorch-extension/index.md b/docs/Compiler-automation/install-tpp-pytorch-extension/index.md deleted file mode 100644 index 56669e10f..000000000 --- a/docs/Compiler-automation/install-tpp-pytorch-extension/index.md +++ /dev/null @@ -1,198 +0,0 @@ -Automatically generated README for this automation recipe: **install-tpp-pytorch-extension** - -Category: **Compiler automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-tpp-pytorch-extension,1701d2f5f4e84d42) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tpp-pytorch-extension)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *install,get,src,from.src,tpp-pex,src-tpp-pex* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install get src from.src tpp-pex src-tpp-pex" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,get,src,from.src,tpp-pex,src-tpp-pex` - -`cm run script --tags=install,get,src,from.src,tpp-pex,src-tpp-pex[,variations] ` - -*or* - -`cmr "install get src from.src tpp-pex src-tpp-pex"` - -`cmr "install get src from.src tpp-pex src-tpp-pex [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'install,get,src,from.src,tpp-pex,src-tpp-pex' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,get,src,from.src,tpp-pex,src-tpp-pex"``` - -#### Run this script via Docker (beta) - -`cm docker script "install get src from.src tpp-pex src-tpp-pex[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_branch.#` - - Environment variables: - - *CM_GIT_CHECKOUT*: `#` - - Workflow: - * `_for-intel-mlperf-inference-v3.1-gptj` - - Environment variables: - - *CM_CONDA_ENV*: `yes` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,conda,_name.gptj-pt - * CM names: `--adr.['conda']...` - - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) - * get,generic,conda-package,_package.python - * CM names: `--adr.['conda-package', 'python3']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.wheel,_source.conda-forge - * CM names: `--adr.['conda-package', 'wheel']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.setuptools,_source.conda-forge - * CM names: `--adr.['conda-package', 'setuptools']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * install,llvm,src,_for-intel-mlperf-inference-v3.1-gptj - - CM script: [install-llvm-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-src) - * `_sha.#` - - Environment variables: - - *CM_GIT_CHECKOUT_SHA*: `#` - - Workflow: - * `_tag.#` - - Environment variables: - - *CM_GIT_CHECKOUT_TAG*: `#` - - Workflow: - -
    - - - * Group "**repo**" -
    - Click here to expand this section. - - * `_repo.#` - - Environment variables: - - *CM_GIT_URL*: `#` - - Workflow: - * **`_repo.https://github.com/libxsmm/tpp-pytorch-extension`** (default) - - Environment variables: - - *CM_GIT_URL*: `https://github.com/libxsmm/tpp-pytorch-extension` - - Workflow: - -
    - - -#### Default variations - -`_repo.https://github.com/libxsmm/tpp-pytorch-extension` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tpp-pytorch-extension/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,python3 - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_CONDA_ENV': ['yes']}` - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,pytorch,from.src - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_CONDA_ENV': ['yes']}` - * CM names: `--adr.['pytorch']...` - - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src) - * get,git,repo - * CM names: `--adr.['tpp-pex-src-repo']...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tpp-pytorch-extension/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tpp-pytorch-extension/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tpp-pytorch-extension/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tpp-pytorch-extension/_cm.json) - 1. Run "postrocess" function from customize.py - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-tpp-pytorch-extension/_cm.json) - -___ -### Script output -`cmr "install get src from.src tpp-pex src-tpp-pex [,variations]" -j` -#### New environment keys (filter) - -* `CM_TPP_PEX_*` -#### New environment keys auto-detected from customize diff --git a/docs/Compiler-automation/install-transformers-from-src/index.md b/docs/Compiler-automation/install-transformers-from-src/index.md deleted file mode 100644 index 0ac334c3c..000000000 --- a/docs/Compiler-automation/install-transformers-from-src/index.md +++ /dev/null @@ -1,196 +0,0 @@ -Automatically generated README for this automation recipe: **install-transformers-from-src** - -Category: **Compiler automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-transformers-from-src,88512c48ea5c4186) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-transformers-from-src)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *install,src,from.src,transformers,src-transformers* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install src from.src transformers src-transformers" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,src,from.src,transformers,src-transformers` - -`cm run script --tags=install,src,from.src,transformers,src-transformers[,variations] ` - -*or* - -`cmr "install src from.src transformers src-transformers"` - -`cmr "install src from.src transformers src-transformers [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'install,src,from.src,transformers,src-transformers' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,src,from.src,transformers,src-transformers"``` - -#### Run this script via Docker (beta) - -`cm docker script "install src from.src transformers src-transformers[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_branch.#` - - Environment variables: - - *CM_GIT_CHECKOUT*: `#` - - Workflow: - * `_for-intel-mlperf-inference-v3.1-bert` - - Environment variables: - - *CM_CONDA_ENV*: `yes` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,conda,_name.bert-pt - * CM names: `--adr.['conda']...` - - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) - * get,generic,conda-package,_package.python - * CM names: `--adr.['conda-package', 'python3']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.wheel,_source.conda-forge - * CM names: `--adr.['conda-package', 'wheel']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.setuptools,_source.conda-forge - * CM names: `--adr.['conda-package', 'setuptools']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * `_sha.#` - - Environment variables: - - *CM_GIT_CHECKOUT_SHA*: `#` - - Workflow: - * `_tag.#` - - Environment variables: - - *CM_GIT_CHECKOUT_TAG*: `#` - - Workflow: - -
    - - - * Group "**repo**" -
    - Click here to expand this section. - - * `_repo.#` - - Environment variables: - - *CM_GIT_URL*: `#` - - Workflow: - * **`_repo.https://github.com/pytorch/pytorch`** (default) - - Environment variables: - - *CM_GIT_URL*: `https://github.com/huggingface/transformers` - - Workflow: - -
    - - -#### Default variations - -`_repo.https://github.com/pytorch/pytorch` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-transformers-from-src/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,python3 - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_CONDA_ENV': ['yes']}` - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,pytorch,from.src - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_CONDA_ENV': ['yes']}` - * CM names: `--adr.['pytorch']...` - - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src) - * get,git,repo - * CM names: `--adr.['transformers-src-repo']...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-transformers-from-src/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-transformers-from-src/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-transformers-from-src/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-transformers-from-src/_cm.json) - 1. Run "postrocess" function from customize.py - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-transformers-from-src/_cm.json) - -___ -### Script output -`cmr "install src from.src transformers src-transformers [,variations]" -j` -#### New environment keys (filter) - -* `CM_TRANSFORMERS_*` -#### New environment keys auto-detected from customize diff --git a/docs/Dashboard-automation/index.md b/docs/Dashboard-automation/index.md new file mode 100644 index 000000000..8956edd96 --- /dev/null +++ b/docs/Dashboard-automation/index.md @@ -0,0 +1,3 @@ +The Dashboard automation category contains the following scripts: + +- [publish-results-to-dashboard](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/publish-results-to-dashboard/README.md) diff --git a/docs/Dashboard-automation/publish-results-to-dashboard/index.md b/docs/Dashboard-automation/publish-results-to-dashboard/index.md deleted file mode 100644 index d59985009..000000000 --- a/docs/Dashboard-automation/publish-results-to-dashboard/index.md +++ /dev/null @@ -1,123 +0,0 @@ -Automatically generated README for this automation recipe: **publish-results-to-dashboard** - -Category: **Dashboard automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=publish-results-to-dashboard,4af3a2d09f14412b) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/publish-results-to-dashboard)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *publish-results,dashboard* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "publish-results dashboard" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=publish-results,dashboard` - -`cm run script --tags=publish-results,dashboard ` - -*or* - -`cmr "publish-results dashboard"` - -`cmr "publish-results dashboard " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'publish-results,dashboard' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="publish-results,dashboard"``` - -#### Run this script via Docker (beta) - -`cm docker script "publish-results dashboard" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/publish-results-to-dashboard/_cm.json)*** - * get,python3 - * CM names: `--adr.['python3', 'python']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,generic-python-lib,_wandb - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/publish-results-to-dashboard/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/publish-results-to-dashboard/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/publish-results-to-dashboard/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/publish-results-to-dashboard/_cm.json) - 1. Run "postrocess" function from customize.py - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/publish-results-to-dashboard/_cm.json) - -___ -### Script output -`cmr "publish-results dashboard " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-android-sdk/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-android-sdk/index.md deleted file mode 100644 index 45d8ea2db..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/get-android-sdk/index.md +++ /dev/null @@ -1,151 +0,0 @@ -Automatically generated README for this automation recipe: **get-android-sdk** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-android-sdk,8c5b4b83d49c441a) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-android-sdk)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,android,sdk,android-sdk* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get android sdk android-sdk" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,android,sdk,android-sdk` - -`cm run script --tags=get,android,sdk,android-sdk [--input_flags]` - -*or* - -`cmr "get android sdk android-sdk"` - -`cmr "get android sdk android-sdk " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,android,sdk,android-sdk' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,android,sdk,android-sdk"``` - -#### Run this script via Docker (beta) - -`cm docker script "get android sdk android-sdk" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--android_cmake_version=value` → `CM_ANDROID_CMAKE_VERSION=value` -* `--android_ndk_version=value` → `CM_ANDROID_NDK_VERSION=value` -* `--android_version=value` → `CM_ANDROID_VERSION=value` -* `--build_tools_version=value` → `CM_ANDROID_BUILD_TOOLS_VERSION=value` -* `--cmdline_tools_version=value` → `CM_ANDROID_CMDLINE_TOOLS_VERSION=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "android_cmake_version":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_ANDROID_BUILD_TOOLS_VERSION: `29.0.3` -* CM_ANDROID_CMAKE_VERSION: `3.6.4111459` -* CM_ANDROID_CMDLINE_TOOLS_URL: `https://dl.google.com/android/repository/commandlinetools-${CM_ANDROID_CMDLINE_TOOLS_OS}-${CM_ANDROID_CMDLINE_TOOLS_VERSION}_latest.zip` -* CM_ANDROID_CMDLINE_TOOLS_VERSION: `9123335` -* CM_ANDROID_NDK_VERSION: `21.3.6528147` -* CM_ANDROID_VERSION: `30` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-android-sdk/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,java - - CM script: [get-java](https://github.com/mlcommons/cm4mlops/tree/master/script/get-java) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-android-sdk/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-android-sdk/_cm.json) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-android-sdk/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-android-sdk/_cm.json) - -___ -### Script output -`cmr "get android sdk android-sdk " [--input_flags] -j` -#### New environment keys (filter) - -* `+PATH` -* `ANDROID_HOME` -* `ANDROID_NDK_HOME` -* `CM_ANDROID_HOME` -#### New environment keys auto-detected from customize - -* `CM_ANDROID_HOME` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-aria2/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-aria2/index.md deleted file mode 100644 index daf2c012d..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/get-aria2/index.md +++ /dev/null @@ -1,148 +0,0 @@ -Automatically generated README for this automation recipe: **get-aria2** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info 
and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-aria2,d83419a90a0c40d0) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *get,aria2,get-aria2* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get aria2 get-aria2" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,aria2,get-aria2` - -`cm run script --tags=get,aria2,get-aria2 [--input_flags]` - -*or* - -`cmr "get aria2 get-aria2"` - -`cmr "get aria2 get-aria2 " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,aria2,get-aria2' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,aria2,get-aria2"``` - -#### Run this script via Docker (beta) - -`cm docker script "get aria2 get-aria2" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--install=value` → `CM_FORCE_INSTALL=value` -* `--src=value` → `CM_ARIA2_BUILD_FROM_SRC=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "install":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2/_cm.yaml)*** - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2/_cm.yaml) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-aria2/_cm.yaml) - -___ -### Script output -`cmr "get aria2 get-aria2 " [--input_flags] -j` -#### New environment keys (filter) - -* `+PATH` -* `CM_ARIA2_*` -#### New environment keys auto-detected from customize - -* `CM_ARIA2_BIN_WITH_PATH` -* `CM_ARIA2_DOWNLOAD_DIR` -* `CM_ARIA2_DOWNLOAD_FILE` -* `CM_ARIA2_DOWNLOAD_FILE2` -* `CM_ARIA2_DOWNLOAD_URL` -* `CM_ARIA2_INSTALLED_PATH` -* `CM_ARIA2_INSTALLED_TO_CACHE` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-bazel/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-bazel/index.md deleted file mode 100644 index d10d39230..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/get-bazel/index.md +++ /dev/null @@ -1,127 +0,0 @@ -Automatically generated README for this automation recipe: **get-bazel** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-bazel,eaef0be38bac493c) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,bazel,get-bazel* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get bazel get-bazel" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,bazel,get-bazel` - -`cm run script --tags=get,bazel,get-bazel ` - -*or* - -`cmr "get bazel get-bazel"` - -`cmr "get bazel get-bazel " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,bazel,get-bazel' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,bazel,get-bazel"``` - -#### Run this script via Docker (beta) - -`cm docker script "get bazel get-bazel" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel/_cm.json)*** - * install,bazel - * Enable this dependency only if all ENV vars are set:
    -`{'CM_REQUIRE_INSTALL': ['yes']}` - - CM script: [install-bazel](https://github.com/mlcommons/cm4mlops/tree/master/script/install-bazel) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-bazel/_cm.json) - -___ -### Script output -`cmr "get bazel get-bazel " -j` -#### New environment keys (filter) - -* `+PATH` -* `CM_BAZEL_*` -#### New environment keys auto-detected from customize - -* `CM_BAZEL_CACHE_TAGS` -* `CM_BAZEL_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-blis/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-blis/index.md deleted file mode 100644 index dad973bb2..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/get-blis/index.md +++ /dev/null @@ -1,158 +0,0 @@ -Automatically generated README for this automation recipe: **get-blis** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-blis,ea6e1cf75242456c) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory 
for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,lib,blis* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get lib blis" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,lib,blis` - -`cm run script --tags=get,lib,blis[,variations] ` - -*or* - -`cmr "get lib blis"` - -`cmr "get lib blis [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,lib,blis' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,lib,blis"``` - -#### Run this script via Docker (beta) - -`cm docker script "get lib blis[variations]" ` - -___ -### Customization - - -#### Variations - - * Group "**source**" -
    - Click here to expand this section. - - * `_amd` - - Workflow: - * **`_flame`** (default) - - Workflow: - -
    - - -#### Default variations - -`_flame` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `master` - -* `0.9.0` -* `master` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis/_cm.json)*** - * get,git - * CM names: `--adr.['blis-source-repo']...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-blis/_cm.json) - -___ -### Script output -`cmr "get lib blis [,variations]" -j` -#### New environment keys (filter) - -* `+LD_LIBRARY_PATH` -* `CM_BLIS_INC` -* `CM_BLIS_INSTALL_PATH` -* `CM_BLIS_LIB` -* `CM_BLIS_SRC_PATH` -#### New environment keys auto-detected from customize - -* `CM_BLIS_INC` -* `CM_BLIS_INSTALL_PATH` -* `CM_BLIS_LIB` -* `CM_BLIS_SRC_PATH` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-brew/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-brew/index.md deleted file mode 100644 index 7bd857bdb..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/get-brew/index.md +++ /dev/null @@ -1,117 +0,0 @@ -Automatically generated README for this automation recipe: **get-brew** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-brew,4a2c5eab1ccf484f) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-brew)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,brew* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get brew" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,brew` - -`cm run script --tags=get,brew ` - -*or* - -`cmr "get brew"` - -`cmr "get brew " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,brew' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,brew"``` - -#### Run this script via Docker (beta) - -`cm docker script "get brew" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-brew/_cm.json) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-brew/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-brew/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-brew/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-brew/_cm.json) - -___ -### Script output -`cmr "get brew " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-cmake/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-cmake/index.md deleted file mode 100644 index cc5c63cd7..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/get-cmake/index.md +++ /dev/null @@ -1,130 +0,0 @@ -Automatically generated README for this automation recipe: **get-cmake** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-cmake,52bf974d791b4fc8) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake)* -* CM meta description for this script: 
*[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,cmake,get-cmake* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get cmake get-cmake" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,cmake,get-cmake` - -`cm run script --tags=get,cmake,get-cmake ` - -*or* - -`cmr "get cmake get-cmake"` - -`cmr "get cmake get-cmake " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,cmake,get-cmake' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,cmake,get-cmake"``` - -#### Run this script via Docker (beta) - -`cm docker script "get cmake get-cmake" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake/_cm.json)*** - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake/_cm.json)*** - * install,cmake,prebuilt - * Enable this dependency only if all ENV vars are set:
    -`{'CM_REQUIRE_INSTALL': ['yes']}` - - CM script: [install-cmake-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/install-cmake-prebuilt) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmake/_cm.json) - -___ -### Script output -`cmr "get cmake get-cmake " -j` -#### New environment keys (filter) - -* `+PATH` -* `CM_CMAKE_*` -* `CM_MAKE_CORES` -#### New environment keys auto-detected from customize - -* `CM_CMAKE_CACHE_TAGS` -* `CM_MAKE_CORES` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-cmsis_5/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-cmsis_5/index.md deleted file mode 100644 index 6e90b97e0..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/get-cmsis_5/index.md +++ /dev/null @@ -1,149 +0,0 @@ -Automatically generated README for this automation recipe: **get-cmsis_5** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-cmsis_5,2258c212b11443f5) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmsis_5)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,cmsis,cmsis_5,arm-software* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get cmsis cmsis_5 arm-software" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,cmsis,cmsis_5,arm-software` - -`cm run script --tags=get,cmsis,cmsis_5,arm-software[,variations] ` - -*or* - -`cmr "get cmsis cmsis_5 arm-software"` - -`cmr "get cmsis cmsis_5 arm-software [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,cmsis,cmsis_5,arm-software' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,cmsis,cmsis_5,arm-software"``` - -#### Run this script via Docker (beta) - -`cm docker script "get cmsis cmsis_5 arm-software[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_recurse-submodules` - - Environment variables: - - *CM_GIT_RECURSE_SUBMODULES*: `--recurse-submodules` - - Workflow: - * `_short-history` - - Environment variables: - - *CM_GIT_DEPTH*: `--depth 10` - - Workflow: - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_GIT_DEPTH: `` -* CM_GIT_PATCH: `no` -* CM_GIT_URL: `https://github.com/ARM-software/CMSIS_5.git` - -
    - -#### Versions -Default version: `custom` - -* `custom` -* `develop` -* `master` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmsis_5/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmsis_5/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmsis_5/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmsis_5/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmsis_5/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmsis_5/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-cmsis_5/_cm.json) - -___ -### Script output -`cmr "get cmsis cmsis_5 arm-software [,variations]" -j` -#### New environment keys (filter) - -* `CMSIS*` -#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-docker/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-docker/index.md deleted file mode 100644 index a836ce6de..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/get-docker/index.md +++ /dev/null @@ -1,119 +0,0 @@ -Automatically generated README for this automation recipe: **get-docker** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-docker,6192accce4234084) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-docker)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,install,docker,engine* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get install docker engine" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,install,docker,engine` - -`cm run script --tags=get,install,docker,engine ` - -*or* - -`cmr "get install docker engine"` - -`cmr "get install docker engine " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,install,docker,engine' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,install,docker,engine"``` - -#### Run this script via Docker (beta) - -`cm docker script "get install docker engine" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-docker/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-docker/_cm.json) - 1. ***Run native script if exists*** - * [run-ubuntu.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-docker/run-ubuntu.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-docker/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-docker/_cm.json) - -___ -### Script output -`cmr "get install docker engine " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-generic-sys-util/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-generic-sys-util/index.md deleted file mode 100644 index ef0ece7f9..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/get-generic-sys-util/index.md +++ /dev/null @@ -1,227 +0,0 @@ -Automatically generated README for this automation recipe: **get-generic-sys-util** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-generic-sys-util,bb0393afa8404a11) ]* - ---- -#### Summary - -* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-sys-util)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,sys-util,generic,generic-sys-util* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get sys-util generic generic-sys-util" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,sys-util,generic,generic-sys-util` - -`cm run script --tags=get,sys-util,generic,generic-sys-util[,variations] ` - -*or* - -`cmr "get sys-util generic generic-sys-util"` - -`cmr "get sys-util generic generic-sys-util [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,sys-util,generic,generic-sys-util' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,sys-util,generic,generic-sys-util"``` - -#### Run this script via Docker (beta) - -`cm docker script "get sys-util generic generic-sys-util[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_g++-12` - - Environment variables: - - *CM_SYS_UTIL_NAME*: `g++12` - - Workflow: - * `_gflags-dev` - - Environment variables: - - *CM_SYS_UTIL_NAME*: `gflags-dev` - - Workflow: - * `_git-lfs` - - Environment variables: - - *CM_SYS_UTIL_NAME*: `git-lfs` - - Workflow: - * `_glog-dev` - - Environment variables: - - *CM_SYS_UTIL_NAME*: `glog-dev` - - Workflow: - * `_libboost-all-dev` - - Environment variables: - - *CM_SYS_UTIL_NAME*: `libboost-all-dev` - - Workflow: - * `_libffi7` - - Environment variables: - - *CM_SYS_UTIL_NAME*: `libffi7` - - Workflow: - * `_libgmock-dev` - - Environment variables: - - *CM_SYS_UTIL_NAME*: `libgmock-dev` - - Workflow: - * `_libmpfr-dev` - - Environment variables: - - *CM_SYS_UTIL_NAME*: `libmpfr-dev` - - Workflow: - * `_libnuma-dev` - - Environment variables: - - *CM_SYS_UTIL_NAME*: `libnuma-dev` - - Workflow: - * `_libpci-dev` - - Environment variables: - - *CM_SYS_UTIL_NAME*: `libpci-dev` - - Workflow: - * `_libre2-dev` - - Environment variables: - - *CM_SYS_UTIL_NAME*: `libre2-dev` - - Workflow: - * `_libudev-dev` - - Environment variables: - - *CM_SYS_UTIL_NAME*: `libudev-dev` - - Workflow: - * `_ninja-build` - - Environment variables: - - *CM_SYS_UTIL_NAME*: `ninja-build` - - Workflow: - * `_ntpdate` - - Environment variables: - - *CM_SYS_UTIL_NAME*: `ntpdate` - - Workflow: - * `_numactl` - - Environment variables: - - *CM_SYS_UTIL_NAME*: `numactl` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * install,numactl,from.src - * Enable this dependency only if all ENV vars are set:
    -`{'CM_HOST_OS_FLAVOR': ['rhel'], 'CM_HOST_OS_VERSION': ['9.1', '9.2', '9.3']}` - - CM script: [install-numactl-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-numactl-from-src) - * `_nvidia-cuda-toolkit` - - Environment variables: - - *CM_SYS_UTIL_NAME*: `nvidia-cuda-toolkit` - - Workflow: - * `_rapidjson-dev` - - Environment variables: - - *CM_SYS_UTIL_NAME*: `rapidjson-dev` - - Workflow: - * `_rsync` - - Environment variables: - - *CM_SYS_UTIL_NAME*: `rsync` - - Workflow: - * `_screen` - - Environment variables: - - *CM_SYS_UTIL_NAME*: `screen` - - Workflow: - * `_sox` - - Environment variables: - - *CM_SYS_UTIL_NAME*: `sox` - - Workflow: - * `_transmission` - - Environment variables: - - *CM_SYS_UTIL_NAME*: `transmission` - - Workflow: - * `_zlib` - - Environment variables: - - *CM_SYS_UTIL_NAME*: `zlib` - - Workflow: - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_CLEAN_DIRS: `bin` -* CM_SUDO: `sudo` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-sys-util/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-sys-util/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-sys-util/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-sys-util/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-sys-util/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-sys-util/_cm.json) - -___ -### Script output -`cmr "get sys-util generic generic-sys-util [,variations]" -j` -#### New environment keys (filter) - -* `+PATH` -#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-google-test/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-google-test/index.md deleted file mode 100644 index 185960383..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/get-google-test/index.md +++ /dev/null @@ -1,137 +0,0 @@ -Automatically generated README for this automation recipe: **get-google-test** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-google-test,02945138a5614253) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-test)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,google-test,googletest,gtest,test,google* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get google-test googletest gtest test google" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,google-test,googletest,gtest,test,google` - -`cm run script --tags=get,google-test,googletest,gtest,test,google ` - -*or* - -`cmr "get google-test googletest gtest test google"` - -`cmr "get google-test googletest gtest test google " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,google-test,googletest,gtest,test,google' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,google-test,googletest,gtest,test,google"``` - -#### Run this script via Docker (beta) - -`cm docker script "get google-test googletest gtest test google" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `1.14.0` - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-test/_cm.json)*** - * get,cmake - * CM names: `--adr.['cmake']...` - - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) - * get,compiler - * CM names: `--adr.['compiler']...` - - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) - - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) - - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-test/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-test/_cm.json)*** - * get,git,repo,_repo.https://github.com/google/googletest.git - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-test/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-test/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-test/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-google-test/_cm.json) - -___ -### Script output -`cmr "get google-test googletest gtest test google " -j` -#### New environment keys (filter) - -* `+C_INCLUDE_PATH` -* `+LD_LIBRARY_PATH` -* `CM_GOOGLE_TEST_INSTALL_PATH` -* `CM_GOOGLE_TEST_SRC_PATH` -#### New environment keys auto-detected from customize - -* `CM_GOOGLE_TEST_INSTALL_PATH` -* `CM_GOOGLE_TEST_SRC_PATH` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-java/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-java/index.md deleted file mode 100644 index ae31b39ca..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/get-java/index.md +++ /dev/null @@ -1,165 +0,0 @@ -Automatically generated README for this automation recipe: **get-java** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-java,9399d0e785704f8c) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,java* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get java" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,java` - -`cm run script --tags=get,java[,variations] [--input_flags]` - -*or* - -`cmr "get java"` - -`cmr "get java [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'get,java', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,java"``` - -#### Run this script via Docker (beta) - -`cm docker script "get java[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_install` - - Environment variables: - - *CM_JAVA_PREBUILT_INSTALL*: `on` - - Workflow: - -
    - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--install=value` → `CM_JAVA_PREBUILT_INSTALL=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "install":...}) -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_JAVA_PREBUILT_VERSION: `19` -* CM_JAVA_PREBUILT_BUILD: `36` -* CM_JAVA_PREBUILT_URL: `https://download.java.net/openjdk/jdk${CM_JAVA_PREBUILT_VERSION}/ri/` -* CM_JAVA_PREBUILT_FILENAME: `openjdk-${CM_JAVA_PREBUILT_VERSION}+${CM_JAVA_PREBUILT_BUILD}_${CM_JAVA_PREBUILT_HOST_OS}-x64_bin` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-java/_cm.json) - -___ -### Script output -`cmr "get java [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `+PATH` -* `CM_JAVA_*` -* `JAVA_HOME` -#### New environment keys auto-detected from customize - -* `CM_JAVA_BIN` -* `CM_JAVA_CACHE_TAGS` -* `CM_JAVA_PREBUILT_EXT` -* `CM_JAVA_PREBUILT_FILENAME` -* `CM_JAVA_PREBUILT_HOST_OS` -* `CM_JAVA_PREBUILT_URL` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-javac/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-javac/index.md deleted file mode 100644 index fbb21c123..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/get-javac/index.md +++ /dev/null @@ -1,168 +0,0 @@ -Automatically generated README for this automation recipe: **get-javac** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-javac,509280c497b24226) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,javac* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get javac" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,javac` - -`cm run script --tags=get,javac[,variations] [--input_flags]` - -*or* - -`cmr "get javac"` - -`cmr "get javac [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'get,javac', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,javac"``` - -#### Run this script via Docker (beta) - -`cm docker script "get javac[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_install` - - Environment variables: - - *CM_JAVAC_PREBUILT_INSTALL*: `on` - - Workflow: - -
    - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--install=value` → `CM_JAVAC_PREBUILT_INSTALL=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "install":...}) -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_JAVAC_PREBUILT_VERSION: `19` -* CM_JAVAC_PREBUILT_BUILD: `36` -* CM_JAVAC_PREBUILT_URL: `https://download.java.net/openjdk/jdk${CM_JAVAC_PREBUILT_VERSION}/ri/` -* CM_JAVAC_PREBUILT_FILENAME: `openjdk-${CM_JAVAC_PREBUILT_VERSION}+${CM_JAVAC_PREBUILT_BUILD}_${CM_JAVAC_PREBUILT_HOST_OS}-x64_bin` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-javac/_cm.json) - -___ -### Script output -`cmr "get javac [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `+PATH` -* `CM_JAVAC_*` -* `CM_JAVA_*` -* `JAVA_HOME` -#### New environment keys auto-detected from customize - -* `CM_JAVAC_BIN` -* `CM_JAVAC_CACHE_TAGS` -* `CM_JAVAC_PREBUILT_EXT` -* `CM_JAVAC_PREBUILT_FILENAME` -* `CM_JAVAC_PREBUILT_HOST_OS` -* `CM_JAVAC_PREBUILT_URL` -* `CM_JAVA_BIN` -* `CM_JAVA_BIN_WITH_PATH` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-armnn/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-armnn/index.md deleted file mode 100644 index 745886d07..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-armnn/index.md +++ /dev/null @@ -1,132 +0,0 @@ -Automatically generated README for this automation recipe: **get-lib-armnn** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-lib-armnn,9603a2e90fd44587) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-armnn)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,lib-armnn,lib,armnn* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get lib-armnn lib armnn" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,lib-armnn,lib,armnn` - -`cm run script --tags=get,lib-armnn,lib,armnn ` - -*or* - -`cmr "get lib-armnn lib armnn"` - -`cmr "get lib-armnn lib armnn " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'get,lib-armnn,lib,armnn', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,lib-armnn,lib,armnn"``` - -#### Run this script via Docker (beta) - -`cm docker script "get lib-armnn lib armnn" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `23.11` - -* `22.11` -* `23.05` -* `23.11` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-armnn/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-armnn/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-armnn/_cm.json)*** - * get,git,repo,_repo.https://github.com/ARM-software/armnn - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-armnn/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-armnn/_cm.json) - 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-armnn/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-armnn/_cm.json) - -___ -### Script output -`cmr "get lib-armnn lib armnn " -j` -#### New environment keys (filter) - -* `+CPLUS_INCLUDE_PATH` -* `+C_INCLUDE_PATH` -* `+LD_LIBRARY_PATH` -* `CM_LIB_ARMNN_VERSION` -* `CM_LIB_DNNL_*` -#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-dnnl/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-dnnl/index.md deleted file mode 100644 index e12b39926..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-dnnl/index.md +++ /dev/null @@ -1,132 +0,0 @@ -Automatically generated README for this automation recipe: **get-lib-dnnl** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-lib-dnnl,1cd35a6a3b0b4530) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-dnnl)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,lib-dnnl,lib,dnnl* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get lib-dnnl lib dnnl" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,lib-dnnl,lib,dnnl` - -`cm run script --tags=get,lib-dnnl,lib,dnnl ` - -*or* - -`cmr "get lib-dnnl lib dnnl"` - -`cmr "get lib-dnnl lib dnnl " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'get,lib-dnnl,lib,dnnl', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,lib-dnnl,lib,dnnl"``` - -#### Run this script via Docker (beta) - -`cm docker script "get lib-dnnl lib dnnl" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `dev` - -* `2.2.4` -* `dev` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-dnnl/_cm.json)*** - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * cmake,get-cmake - - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-dnnl/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-dnnl/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-dnnl/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-dnnl/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-dnnl/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-dnnl/_cm.json) - -___ -### Script output -`cmr "get lib-dnnl lib dnnl " -j` -#### New environment keys (filter) - -* `+CPLUS_INCLUDE_PATH` -* `+C_INCLUDE_PATH` -* `+LD_LIBRARY_PATH` -* `CM_LIB_DNNL_*` -#### New environment keys auto-detected from customize - -* `CM_LIB_DNNL_INSTALL_DIR` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-protobuf/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-protobuf/index.md deleted file mode 100644 index 80608d46d..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-protobuf/index.md +++ /dev/null @@ -1,154 +0,0 @@ -Automatically generated README for this automation recipe: **get-lib-protobuf** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-lib-protobuf,db45f1eb73934f91) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-protobuf)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,google-protobuf,protobuf,lib,lib-protobuf,google* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get google-protobuf protobuf lib lib-protobuf google" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,google-protobuf,protobuf,lib,lib-protobuf,google` - -`cm run script --tags=get,google-protobuf,protobuf,lib,lib-protobuf,google[,variations] ` - -*or* - -`cmr "get google-protobuf protobuf lib lib-protobuf google"` - -`cmr "get google-protobuf protobuf lib lib-protobuf google [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'get,google-protobuf,protobuf,lib,lib-protobuf,google', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,google-protobuf,protobuf,lib,lib-protobuf,google"``` - -#### Run this script via Docker (beta) - -`cm docker script "get google-protobuf protobuf lib lib-protobuf google[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_branch.#` - - Environment variables: - - *CM_TMP_GIT_CHECKOUT*: `#` - - Workflow: - * `_tag.#` - - Environment variables: - - *CM_GIT_CHECKOUT_TAG*: `#` - - Workflow: - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `1.13.0` - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-protobuf/_cm.json)*** - * get,cmake - - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) - * get,gcc - - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-protobuf/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-protobuf/_cm.json)*** - * get,git,repo,_repo.https://github.com/google/protobuf.git - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-protobuf/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-protobuf/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-protobuf/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-protobuf/_cm.json) - -___ -### Script output -`cmr "get google-protobuf protobuf lib lib-protobuf google [,variations]" -j` -#### New environment keys (filter) - -* `+CPLUS_INCLUDE_PATH` -* `+C_INCLUDE_PATH` -* `+LD_LIBRARY_PATH` -* `CM_GOOGLE_PROTOBUF_INSTALL_PATH` -* `CM_GOOGLE_PROTOBUF_SRC_PATH` -#### New environment keys auto-detected from customize - -* `CM_GOOGLE_PROTOBUF_INSTALL_PATH` -* `CM_GOOGLE_PROTOBUF_SRC_PATH` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-qaic-api/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-qaic-api/index.md deleted file mode 100644 index 243dc586f..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/get-lib-qaic-api/index.md +++ /dev/null @@ -1,131 +0,0 @@ -Automatically generated README for this automation recipe: **get-lib-qaic-api** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-lib-qaic-api,1e253ae184e44f23) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-qaic-api)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,api,lib-qaic-api,lib,qaic* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get api lib-qaic-api lib qaic" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,api,lib-qaic-api,lib,qaic` - -`cm run script --tags=get,api,lib-qaic-api,lib,qaic ` - -*or* - -`cmr "get api lib-qaic-api lib qaic"` - -`cmr "get api lib-qaic-api lib qaic " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'get,api,lib-qaic-api,lib,qaic', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,api,lib-qaic-api,lib,qaic"``` - -#### Run this script via Docker (beta) - -`cm docker script "get api lib-qaic-api lib qaic" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `master` - -* `master` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-qaic-api/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-qaic-api/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-qaic-api/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-qaic-api/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-qaic-api/_cm.json) - 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-qaic-api/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-lib-qaic-api/_cm.json) - -___ -### Script output -`cmr "get api lib-qaic-api lib qaic " -j` -#### New environment keys (filter) - -* `+CPLUS_INCLUDE_PATH` -* `+C_INCLUDE_PATH` -* `+LD_LIBRARY_PATH` -* `CM_LIB_QAIC_*` -* `CM_QAIC_API_*` -#### New environment keys auto-detected from customize - -* `CM_QAIC_API_INC_FILE` -* `CM_QAIC_API_SRC_FILE` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-nvidia-docker/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-nvidia-docker/index.md deleted file mode 100644 index 284218740..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/get-nvidia-docker/index.md +++ /dev/null @@ -1,121 +0,0 @@ -Automatically generated README for this automation recipe: **get-nvidia-docker** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-nvidia-docker,465ae240998e4779) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-docker)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,install,nvidia,nvidia-container-toolkit,nvidia-docker,engine* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get install nvidia nvidia-container-toolkit nvidia-docker engine" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,install,nvidia,nvidia-container-toolkit,nvidia-docker,engine` - -`cm run script --tags=get,install,nvidia,nvidia-container-toolkit,nvidia-docker,engine ` - -*or* - -`cmr "get install nvidia nvidia-container-toolkit nvidia-docker engine"` - -`cmr "get install nvidia nvidia-container-toolkit nvidia-docker engine " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,install,nvidia,nvidia-container-toolkit,nvidia-docker,engine' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,install,nvidia,nvidia-container-toolkit,nvidia-docker,engine"``` - -#### Run this script via Docker (beta) - -`cm docker script "get install nvidia nvidia-container-toolkit nvidia-docker engine" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-docker/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,docker - - CM script: [get-docker](https://github.com/mlcommons/cm4mlops/tree/master/script/get-docker) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-docker/_cm.json) - 1. ***Run native script if exists*** - * [run-ubuntu.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-docker/run-ubuntu.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-docker/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-docker/_cm.json) - -___ -### Script output -`cmr "get install nvidia nvidia-container-toolkit nvidia-docker engine " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-openssl/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-openssl/index.md deleted file mode 100644 index a348f1272..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/get-openssl/index.md +++ /dev/null @@ -1,125 +0,0 @@ -Automatically generated README for this automation recipe: **get-openssl** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-openssl,febdae70e9e64e30) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-openssl)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,openssl,lib,lib-openssl* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get openssl lib lib-openssl" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,openssl,lib,lib-openssl` - -`cm run script --tags=get,openssl,lib,lib-openssl ` - -*or* - -`cmr "get openssl lib lib-openssl"` - -`cmr "get openssl lib lib-openssl " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,openssl,lib,lib-openssl' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,openssl,lib,lib-openssl"``` - -#### Run this script via Docker (beta) - -`cm docker script "get openssl lib lib-openssl" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-openssl/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-openssl/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-openssl/_cm.json)*** - * install,openssl - * Enable this dependency only if all ENV vars are set:
    -`{'CM_REQUIRE_INSTALL': ['yes']}` - - CM script: [install-openssl](https://github.com/mlcommons/cm4mlops/tree/master/script/install-openssl) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-openssl/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-openssl/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-openssl/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-openssl/_cm.json) - -___ -### Script output -`cmr "get openssl lib lib-openssl " -j` -#### New environment keys (filter) - -* `+LD_LIBRARY_PATH` -* `CM_OPENSSL_*` -#### New environment keys auto-detected from customize - -* `CM_OPENSSL_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-rclone/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-rclone/index.md deleted file mode 100644 index 6dcd4adda..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/get-rclone/index.md +++ /dev/null @@ -1,150 +0,0 @@ -Automatically generated README for this automation recipe: **get-rclone** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-rclone,22ffb43c49c9419e) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone)* -* CM meta description for this script: 
*[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,rclone* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get rclone" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,rclone` - -`cm run script --tags=get,rclone[,variations] ` - -*or* - -`cmr "get rclone"` - -`cmr "get rclone [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,rclone' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,rclone"``` - -#### Run this script via Docker (beta) - -`cm docker script "get rclone[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_gdrive` - - Environment variables: - - *CM_RCLONE_GDRIVE*: `yes` - - Workflow: - * `_system` - - Environment variables: - - *CM_RCLONE_SYSTEM*: `yes` - - Workflow: - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `1.65.2` - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-rclone/_cm.json) - -___ -### Script output -`cmr "get rclone [,variations]" -j` -#### New environment keys (filter) - -* `+PATH` -* `CM_RCLONE_BIN_WITH_PATH` -* `CM_RCLONE_CACHE_TAGS` -* `CM_RCLONE_VERSION` -#### New environment keys auto-detected from customize - -* `CM_RCLONE_BIN_WITH_PATH` -* `CM_RCLONE_CACHE_TAGS` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-cm/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-cm/index.md deleted file mode 100644 index 143e07b7d..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-cm/index.md +++ /dev/null @@ -1,156 +0,0 @@ -Automatically generated README for this automation recipe: **get-sys-utils-cm** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-sys-utils-cm,bc90993277e84b8e) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *get,sys-utils-cm* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get sys-utils-cm" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,sys-utils-cm` - -`cm run script --tags=get,sys-utils-cm[,variations] [--input_flags]` - -*or* - -`cmr "get sys-utils-cm"` - -`cmr "get sys-utils-cm [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,sys-utils-cm' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,sys-utils-cm"``` - -#### Run this script via Docker (beta) - -`cm docker script "get sys-utils-cm[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_user` - - Environment variables: - - *CM_PYTHON_PIP_USER*: `--user` - - Workflow: - -
    - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--skip=value` → `CM_SKIP_SYS_UTILS=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "skip":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/_cm.yaml) - 1. ***Run native script if exists*** - * [run-arch.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/run-arch.sh) - * [run-debian.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/run-debian.sh) - * [run-macos.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/run-macos.sh) - * [run-rhel.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/run-rhel.sh) - * [run-sles.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/run-sles.sh) - * [run-ubuntu.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/run-ubuntu.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/_cm.yaml) - 1. Run "postrocess" function from customize.py - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-cm/_cm.yaml) - -___ -### Script output -`cmr "get sys-utils-cm [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `+PATH` -#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-min/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-min/index.md deleted file mode 100644 index c200ad0d0..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/get-sys-utils-min/index.md +++ /dev/null @@ -1,117 +0,0 @@ -Automatically generated README for this automation recipe: **get-sys-utils-min** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-sys-utils-min,a9af7714d3d94779) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-min)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,sys-utils-min* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get sys-utils-min" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,sys-utils-min` - -`cm run script --tags=get,sys-utils-min ` - -*or* - -`cmr "get sys-utils-min"` - -`cmr "get sys-utils-min " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,sys-utils-min' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,sys-utils-min"``` - -#### Run this script via Docker (beta) - -`cm docker script "get sys-utils-min" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-min/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-min/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-min/_cm.json) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-min/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-sys-utils-min/_cm.json) - -___ -### Script output -`cmr "get sys-utils-min " -j` -#### New environment keys (filter) - -* `+PATH` -#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-xilinx-sdk/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-xilinx-sdk/index.md deleted file mode 100644 index aa2f7dba5..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/get-xilinx-sdk/index.md +++ /dev/null @@ -1,138 +0,0 @@ -Automatically generated README for this automation recipe: **get-xilinx-sdk** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-xilinx-sdk,76d4d1bd09df4490) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-xilinx-sdk)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,xilinx,sdk* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get xilinx sdk" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,xilinx,sdk` - -`cm run script --tags=get,xilinx,sdk [--input_flags]` - -*or* - -`cmr "get xilinx sdk"` - -`cmr "get xilinx sdk " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,xilinx,sdk' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,xilinx,sdk"``` - -#### Run this script via Docker (beta) - -`cm docker script "get xilinx sdk" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--input=value` → `CM_XILINX_SDK_FILE_PATH=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "input":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `2019.1` - -* `2019.1` -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-xilinx-sdk/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-xilinx-sdk/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-xilinx-sdk/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-xilinx-sdk/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-xilinx-sdk/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-xilinx-sdk/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-xilinx-sdk/_cm.json) - -___ -### Script output -`cmr "get xilinx sdk " [--input_flags] -j` -#### New environment keys (filter) - -* `+PATH` -* `CM_XILINX_*` -#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/get-zendnn/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/get-zendnn/index.md deleted file mode 100644 index acf66bf02..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/get-zendnn/index.md +++ /dev/null @@ -1,127 +0,0 @@ -Automatically generated README for this automation recipe: **get-zendnn** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-zendnn,d1c6feb0ee684b09) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,zendnn,amd,from.src* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get zendnn amd from.src" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,zendnn,amd,from.src` - -`cm run script --tags=get,zendnn,amd,from.src ` - -*or* - -`cmr "get zendnn amd from.src"` - -`cmr "get zendnn amd from.src " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,zendnn,amd,from.src' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,zendnn,amd,from.src"``` - -#### Run this script via Docker (beta) - -`cm docker script "get zendnn amd from.src" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn/_cm.json)*** - * get,amd,aocl - * CM names: `--adr.['aocl']...` - - CM script: [get-aocl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-aocl) - * get,lib,blis,_amd - - CM script: [get-blis](https://github.com/mlcommons/cm4mlops/tree/master/script/get-blis) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,git,_repo.https://github.com/amd/ZenDNN.git - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zendnn/_cm.json) - -___ -### Script output -`cmr "get zendnn amd from.src " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/index.md new file mode 100644 index 000000000..ebf7f44ab --- /dev/null +++ b/docs/Detection-or-installation-of-tools-and-artifacts/index.md @@ -0,0 +1,31 @@ +The Detection or installation of tools and artifacts category contains the following scripts: + +- [get-android-sdk](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-android-sdk/README.md) +- [get-aria2](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-aria2/README.md) +- [get-bazel](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-bazel/README.md) +- [get-blis](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-blis/README.md) +- [get-brew](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-brew/README.md) +- [get-cmake](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-cmake/README.md) +- [get-cmsis_5](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-cmsis_5/README.md) +- [get-docker](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-docker/README.md) +- [get-generic-sys-util](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-generic-sys-util/README.md) +- [get-google-test](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-google-test/README.md) +- [get-java](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-java/README.md) +- [get-javac](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-javac/README.md) +- 
[get-lib-armnn](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-lib-armnn/README.md) +- [get-lib-dnnl](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-lib-dnnl/README.md) +- [get-lib-protobuf](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-lib-protobuf/README.md) +- [get-lib-qaic-api](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-lib-qaic-api/README.md) +- [get-nvidia-docker](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-nvidia-docker/README.md) +- [get-openssl](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-openssl/README.md) +- [get-rclone](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-rclone/README.md) +- [get-sys-utils-cm](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-sys-utils-cm/README.md) +- [get-sys-utils-min](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-sys-utils-min/README.md) +- [get-xilinx-sdk](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-xilinx-sdk/README.md) +- [get-zendnn](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-zendnn/README.md) +- [install-bazel](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-bazel/README.md) +- [install-cmake-prebuilt](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-cmake-prebuilt/README.md) +- [install-gflags](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-gflags/README.md) +- [install-github-cli](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-github-cli/README.md) +- [install-numactl-from-src](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-numactl-from-src/README.md) +- [install-openssl](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-openssl/README.md) diff 
--git a/docs/Detection-or-installation-of-tools-and-artifacts/install-bazel/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/install-bazel/index.md deleted file mode 100644 index 8787010ef..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/install-bazel/index.md +++ /dev/null @@ -1,134 +0,0 @@ -Automatically generated README for this automation recipe: **install-bazel** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-bazel,dfd3d2bf5b764175) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *install,script,bazel* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install script bazel" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,script,bazel` - -`cm run script --tags=install,script,bazel ` - -*or* - -`cmr "install script bazel"` - -`cmr "install script bazel " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'install,script,bazel' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,script,bazel"``` - -#### Run this script via Docker (beta) - -`cm docker script "install script bazel" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `7.0.2` - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel/_cm.json) - 1. ***Run native script if exists*** - * [run-aarch64.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel/run-aarch64.sh) - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel/_cm.json) - 1. Run "postrocess" function from customize.py - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-bazel/_cm.json)*** - * get,bazel - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_REQUIRE_INSTALL': ['yes']}` - - CM script: [get-bazel](https://github.com/mlcommons/cm4mlops/tree/master/script/get-bazel) - -___ -### Script output -`cmr "install script bazel " -j` -#### New environment keys (filter) - -* `CM_BAZEL_*` -#### New environment keys auto-detected from customize - -* `CM_BAZEL_BIN_WITH_PATH` -* `CM_BAZEL_DOWNLOAD_FILE` -* `CM_BAZEL_DOWNLOAD_URL` -* `CM_BAZEL_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/install-cmake-prebuilt/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/install-cmake-prebuilt/index.md deleted file mode 100644 index af835b71b..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/install-cmake-prebuilt/index.md +++ /dev/null @@ -1,136 +0,0 @@ -Automatically generated README for this automation recipe: **install-cmake-prebuilt** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-cmake-prebuilt,5a39ef05992b4103) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cmake-prebuilt)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *install,prebuilt,cmake,prebuilt-cmake,install-prebuilt-cmake* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install prebuilt cmake prebuilt-cmake install-prebuilt-cmake" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,prebuilt,cmake,prebuilt-cmake,install-prebuilt-cmake` - -`cm run script --tags=install,prebuilt,cmake,prebuilt-cmake,install-prebuilt-cmake ` - -*or* - -`cmr "install prebuilt cmake prebuilt-cmake install-prebuilt-cmake"` - -`cmr "install prebuilt cmake prebuilt-cmake install-prebuilt-cmake " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'install,prebuilt,cmake,prebuilt-cmake,install-prebuilt-cmake' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,prebuilt,cmake,prebuilt-cmake,install-prebuilt-cmake"``` - -#### Run this script via Docker (beta) - -`cm docker script "install prebuilt cmake prebuilt-cmake install-prebuilt-cmake" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `3.28.3` - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cmake-prebuilt/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cmake-prebuilt/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cmake-prebuilt/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cmake-prebuilt/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cmake-prebuilt/_cm.json) - 1. Run "postrocess" function from customize.py - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-cmake-prebuilt/_cm.json)*** - * get,cmake - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_REQUIRE_INSTALL': ['yes']}` - - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) - -___ -### Script output -`cmr "install prebuilt cmake prebuilt-cmake install-prebuilt-cmake " -j` -#### New environment keys (filter) - -* `+C_INCLUDE_PATH` -* `+LD_LIBRARY_PATH` -* `+PATH` -* `CM_CMAKE_*` -* `CM_GET_DEPENDENT_CACHED_PATH` -#### New environment keys auto-detected from customize - -* `CM_CMAKE_BIN_WITH_PATH` -* `CM_CMAKE_INSTALLED_PATH` -* `CM_CMAKE_PACKAGE` -* `CM_GET_DEPENDENT_CACHED_PATH` \ No newline at end of file diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/install-gflags/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/install-gflags/index.md deleted file mode 100644 index dc4ab3c75..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/install-gflags/index.md +++ /dev/null @@ -1,127 +0,0 @@ -Automatically generated README for this automation recipe: **install-gflags** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-gflags,10bb562c29ea459e) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gflags)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *install,src,get,gflags* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install src get gflags" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,src,get,gflags` - -`cm run script --tags=install,src,get,gflags ` - -*or* - -`cmr "install src get gflags"` - -`cmr "install src get gflags " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'install,src,get,gflags' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,src,get,gflags"``` - -#### Run this script via Docker (beta) - -`cm docker script "install src get gflags" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `2.2.2` - -* `2.2.2` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gflags/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,cmake - - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gflags/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gflags/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gflags/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gflags/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gflags/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-gflags/_cm.json) - -___ -### Script output -`cmr "install src get gflags " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/install-github-cli/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/install-github-cli/index.md deleted file mode 100644 index 65cfb01cb..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/install-github-cli/index.md +++ /dev/null @@ -1,121 +0,0 @@ -Automatically generated README for this automation recipe: **install-github-cli** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-github-cli,cd948ec309344bf8) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *install,gh,github,cli,github-cli* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install gh github cli github-cli" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,gh,github,cli,github-cli` - -`cm run script --tags=install,gh,github,cli,github-cli ` - -*or* - -`cmr "install gh github cli github-cli"` - -`cmr "install gh github cli github-cli " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'install,gh,github,cli,github-cli' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,gh,github,cli,github-cli"``` - -#### Run this script via Docker (beta) - -`cm docker script "install gh github cli github-cli" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli/_cm.json) - 1. ***Run native script if exists*** - * [run-macos.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli/run-macos.sh) - * [run-rhel.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli/run-rhel.sh) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli/_cm.json) - 1. Run "postrocess" function from customize.py - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-github-cli/_cm.json) - -___ -### Script output -`cmr "install gh github cli github-cli " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/install-numactl-from-src/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/install-numactl-from-src/index.md deleted file mode 100644 index dd69d6889..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/install-numactl-from-src/index.md +++ /dev/null @@ -1,170 +0,0 @@ -Automatically generated README for this automation recipe: **install-numactl-from-src** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-numactl-from-src,4f355ae8ca1948b2) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-numactl-from-src)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *install,src,from.src,numactl,src-numactl* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install src from.src numactl src-numactl" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,src,from.src,numactl,src-numactl` - -`cm run script --tags=install,src,from.src,numactl,src-numactl[,variations] ` - -*or* - -`cmr "install src from.src numactl src-numactl"` - -`cmr "install src from.src numactl src-numactl [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'install,src,from.src,numactl,src-numactl' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,src,from.src,numactl,src-numactl"``` - -#### Run this script via Docker (beta) - -`cm docker script "install src from.src numactl src-numactl[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_branch.#` - - Environment variables: - - *CM_GIT_CHECKOUT*: `#` - - Workflow: - * `_sha.#` - - Environment variables: - - *CM_GIT_CHECKOUT_SHA*: `#` - - Workflow: - * `_tag.#` - - Environment variables: - - *CM_GIT_CHECKOUT_TAG*: `#` - - Workflow: - -
    - - - * Group "**repo**" -
    - Click here to expand this section. - - * `_repo.#` - - Environment variables: - - *CM_GIT_URL*: `#` - - Workflow: - * **`_repo.https://github.com/numactl/numactl`** (default) - - Environment variables: - - *CM_GIT_URL*: `https://github.com/numactl/numactl` - - Workflow: - -
    - - -#### Default variations - -`_repo.https://github.com/numactl/numactl` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-numactl-from-src/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,git,repo - * CM names: `--adr.['numactl-src-repo']...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-numactl-from-src/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-numactl-from-src/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-numactl-from-src/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-numactl-from-src/_cm.json) - 1. Run "postrocess" function from customize.py - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-numactl-from-src/_cm.json) - -___ -### Script output -`cmr "install src from.src numactl src-numactl [,variations]" -j` -#### New environment keys (filter) - -* `+PATH` -* `CM_NUMACTL_*` -#### New environment keys auto-detected from customize diff --git a/docs/Detection-or-installation-of-tools-and-artifacts/install-openssl/index.md b/docs/Detection-or-installation-of-tools-and-artifacts/install-openssl/index.md deleted file mode 100644 index 7aec6efce..000000000 --- a/docs/Detection-or-installation-of-tools-and-artifacts/install-openssl/index.md +++ /dev/null @@ -1,134 +0,0 @@ -Automatically generated README for this automation recipe: **install-openssl** - -Category: **Detection or installation of tools and artifacts** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-openssl,be472d3b1d014169) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-openssl)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *install,src,openssl,openssl-lib* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install src openssl openssl-lib" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,src,openssl,openssl-lib` - -`cm run script --tags=install,src,openssl,openssl-lib ` - -*or* - -`cmr "install src openssl openssl-lib"` - -`cmr "install src openssl openssl-lib " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'install,src,openssl,openssl-lib' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,src,openssl,openssl-lib"``` - -#### Run this script via Docker (beta) - -`cm docker script "install src openssl openssl-lib" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `1.1.1` - -* `1.1.1` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-openssl/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-openssl/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-openssl/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-openssl/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-openssl/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-openssl/customize.py)*** - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-openssl/_cm.json)*** - * get,openssl - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_REQUIRE_INSTALL': ['yes']}` - - CM script: [get-openssl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-openssl) - -___ -### Script output -`cmr "install src openssl openssl-lib " -j` -#### New environment keys (filter) - -* `+LD_LIBRARY_PATH` -* `CM_OPENSSL_*` -#### New environment keys auto-detected from customize - -* `CM_OPENSSL_BIN_WITH_PATH` -* `CM_OPENSSL_INSTALLED_PATH` \ No newline at end of file diff --git a/docs/DevOps-automation/benchmark-program/index.md b/docs/DevOps-automation/benchmark-program/index.md deleted file mode 100644 index cd0bbeba3..000000000 --- a/docs/DevOps-automation/benchmark-program/index.md +++ /dev/null @@ -1,151 +0,0 @@ -Automatically generated README for this automation recipe: **benchmark-program** - -Category: **DevOps automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=benchmark-program,19f369ef47084895) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *benchmark,program* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "benchmark program" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=benchmark,program` - -`cm run script --tags=benchmark,program[,variations] ` - -*or* - -`cmr "benchmark program"` - -`cmr "benchmark program [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'benchmark,program' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="benchmark,program"``` - -#### Run this script via Docker (beta) - -`cm docker script "benchmark program[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_numactl` - - Workflow: - * `_numactl-interleave` - - Workflow: - * `_profile` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,profiler - - *Warning: no scripts found* - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_ENABLE_NUMACTL: `0` -* CM_ENABLE_PROFILING: `0` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/_cm.json)*** - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * set,performance,mode,_performance - * Enable this dependency only if all ENV vars are set:
    -`{'CM_SET_PERFORMANCE_MODE': ['on', 'yes', 'True', True]}` - - CM script: [set-performance-mode](https://github.com/mlcommons/cm4mlops/tree/master/script/set-performance-mode) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/_cm.json) - 1. ***Run native script if exists*** - * [run-ubuntu.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/run-ubuntu.sh) - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program/_cm.json) - -___ -### Script output -`cmr "benchmark program [,variations]" -j` -#### New environment keys (filter) - -* `CM_RUN_CMD` -#### New environment keys auto-detected from customize - -* `CM_RUN_CMD` \ No newline at end of file diff --git a/docs/DevOps-automation/compile-program/index.md b/docs/DevOps-automation/compile-program/index.md deleted file mode 100644 index 057fdfe98..000000000 --- a/docs/DevOps-automation/compile-program/index.md +++ /dev/null @@ -1,128 +0,0 @@ -Automatically generated README for this automation recipe: **compile-program** - -Category: **DevOps automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=compile-program,c05042ba005a4bfa) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *compile,program,c-program,cpp-program,compile-program,compile-c-program,compile-cpp-program* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "compile program c-program cpp-program compile-program compile-c-program compile-cpp-program" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=compile,program,c-program,cpp-program,compile-program,compile-c-program,compile-cpp-program` - -`cm run script --tags=compile,program,c-program,cpp-program,compile-program,compile-c-program,compile-cpp-program ` - -*or* - -`cmr "compile program c-program cpp-program compile-program compile-c-program compile-cpp-program"` - -`cmr "compile program c-program cpp-program compile-program compile-c-program compile-cpp-program " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'compile,program,c-program,cpp-program,compile-program,compile-c-program,compile-cpp-program', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -</details>
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="compile,program,c-program,cpp-program,compile-program,compile-c-program,compile-cpp-program"``` - -#### Run this script via Docker (beta) - -`cm docker script "compile program c-program cpp-program compile-program compile-c-program compile-cpp-program" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* SKIP_RECOMPILE: `no` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program/_cm.json)*** - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,compiler - * CM names: `--adr.['compiler']...` - - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) - - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) - - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) - * get,compiler-flags - - CM script: [get-compiler-flags](https://github.com/mlcommons/cm4mlops/tree/master/script/get-compiler-flags) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/compile-program/_cm.json) - -___ -### Script output -`cmr "compile program c-program cpp-program compile-program compile-c-program compile-cpp-program " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/convert-csv-to-md/index.md b/docs/DevOps-automation/convert-csv-to-md/index.md deleted file mode 100644 index 129d8588b..000000000 --- a/docs/DevOps-automation/convert-csv-to-md/index.md +++ /dev/null @@ -1,143 +0,0 @@ -Automatically generated README for this automation recipe: **convert-csv-to-md** - -Category: **DevOps automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=convert-csv-to-md,200a95b80bee4a25) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *csv-to-md,convert,to-md,from-csv* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "csv-to-md convert to-md from-csv" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=csv-to-md,convert,to-md,from-csv` - -`cm run script --tags=csv-to-md,convert,to-md,from-csv [--input_flags]` - -*or* - -`cmr "csv-to-md convert to-md from-csv"` - -`cmr "csv-to-md convert to-md from-csv " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'csv-to-md,convert,to-md,from-csv', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -</details>
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="csv-to-md,convert,to-md,from-csv"``` - -#### Run this script via Docker (beta) - -`cm docker script "csv-to-md convert to-md from-csv" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--csv_file=value` → `CM_CSV_FILE=value` -* `--md_file=value` → `CM_MD_FILE=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "csv_file":...}) -``` - -</details>
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md/_cm.json)*** - * get,python3 - * CM names: `--adr.['python, python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,generic-python-lib,_pandas - * CM names: `--adr.['pandas']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.tabulate - * CM names: `--adr.['tabulate']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/convert-csv-to-md/_cm.json) - -___ -### Script output -`cmr "csv-to-md convert to-md from-csv " [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/copy-to-clipboard/index.md b/docs/DevOps-automation/copy-to-clipboard/index.md deleted file mode 100644 index 7122a24c0..000000000 --- a/docs/DevOps-automation/copy-to-clipboard/index.md +++ /dev/null @@ -1,141 +0,0 @@ -Automatically generated README for this automation recipe: **copy-to-clipboard** - -Category: **DevOps automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=copy-to-clipboard,8b3aaa97ce58474d) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/copy-to-clipboard)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *copy,to,clipboard,copy-to-clipboard* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "copy to clipboard copy-to-clipboard" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=copy,to,clipboard,copy-to-clipboard` - -`cm run script --tags=copy,to,clipboard,copy-to-clipboard [--input_flags]` - -*or* - -`cmr "copy to clipboard copy-to-clipboard"` - -`cmr "copy to clipboard copy-to-clipboard " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'copy,to,clipboard,copy-to-clipboard', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -</details>
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="copy,to,clipboard,copy-to-clipboard"``` - -#### Run this script via Docker (beta) - -`cm docker script "copy to clipboard copy-to-clipboard" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--add_quotes=value` → `CM_COPY_TO_CLIPBOARD_TEXT_ADD_QUOTES=value` -* `--q=value` → `CM_COPY_TO_CLIPBOARD_TEXT_ADD_QUOTES=value` -* `--t=value` → `CM_COPY_TO_CLIPBOARD_TEXT=value` -* `--text=value` → `CM_COPY_TO_CLIPBOARD_TEXT=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "add_quotes":...}) -``` - -</details>
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/copy-to-clipboard/_cm.yaml)*** - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,generic-python-lib,_package.pyperclip - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/copy-to-clipboard/_cm.yaml) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/copy-to-clipboard/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/copy-to-clipboard/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/copy-to-clipboard/_cm.yaml) - 1. Run "postrocess" function from customize.py - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/copy-to-clipboard/_cm.yaml) - -___ -### Script output -`cmr "copy to clipboard copy-to-clipboard " [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/create-conda-env/index.md b/docs/DevOps-automation/create-conda-env/index.md deleted file mode 100644 index f97bb0f5a..000000000 --- a/docs/DevOps-automation/create-conda-env/index.md +++ /dev/null @@ -1,148 +0,0 @@ -Automatically generated README for this automation recipe: **create-conda-env** - -Category: **DevOps automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=create-conda-env,e39e0b04c86a40f2) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-conda-env)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *create,get,env,conda-env,conda-environment,create-conda-environment* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "create get env conda-env conda-environment create-conda-environment" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=create,get,env,conda-env,conda-environment,create-conda-environment` - -`cm run script --tags=create,get,env,conda-env,conda-environment,create-conda-environment[,variations] ` - -*or* - -`cmr "create get env conda-env conda-environment create-conda-environment"` - -`cmr "create get env conda-env conda-environment create-conda-environment [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'create,get,env,conda-env,conda-environment,create-conda-environment', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -</details>
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="create,get,env,conda-env,conda-environment,create-conda-environment"``` - -#### Run this script via Docker (beta) - -`cm docker script "create get env conda-env conda-environment create-conda-environment[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_name.#` - - Environment variables: - - *CM_CONDA_ENV_NAME*: `#` - - Workflow: - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-conda-env/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,conda - * CM names: `--adr.['conda']...` - - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-conda-env/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-conda-env/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-conda-env/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-conda-env/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-conda-env/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-conda-env/_cm.json) - -___ -### Script output -`cmr "create get env conda-env conda-environment create-conda-environment [,variations]" -j` -#### New environment keys (filter) - -* `+LD_LIBRARY_PATH` -* `+PATH` -* `CM_CONDA_BIN_PATH` -* `CM_CONDA_LIB_PATH` -* `CM_CONDA_PREFIX` -* `CONDA_PREFIX` -#### New environment keys auto-detected from customize - -* `CM_CONDA_BIN_PATH` -* `CM_CONDA_LIB_PATH` -* `CM_CONDA_PREFIX` \ No newline at end of file diff --git a/docs/DevOps-automation/create-patch/index.md b/docs/DevOps-automation/create-patch/index.md deleted file mode 100644 index 664c378f5..000000000 --- a/docs/DevOps-automation/create-patch/index.md +++ /dev/null @@ -1,135 +0,0 @@ -Automatically generated README for this automation recipe: **create-patch** - -Category: **DevOps automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=create-patch,0659dc1f75664c65) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-patch)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *create,patch* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "create patch" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=create,patch` - -`cm run script --tags=create,patch [--input_flags]` - -*or* - -`cmr "create patch"` - -`cmr "create patch " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'create,patch', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -</details>
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="create,patch"``` - -#### Run this script via Docker (beta) - -`cm docker script "create patch" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--exclude=value` → `CM_CREATE_PATCH_EXCLUDE=value` -* `--new=value` → `CM_CREATE_PATCH_NEW=value` -* `--old=value` → `CM_CREATE_PATCH_OLD=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "exclude":...}) -``` - -</details>
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-patch/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-patch/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-patch/_cm.yaml) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-patch/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-patch/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-patch/_cm.yaml) - -___ -### Script output -`cmr "create patch " [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/detect-sudo/index.md b/docs/DevOps-automation/detect-sudo/index.md deleted file mode 100644 index 49e48b530..000000000 --- a/docs/DevOps-automation/detect-sudo/index.md +++ /dev/null @@ -1,120 +0,0 @@ -Automatically generated README for this automation recipe: **detect-sudo** - -Category: **DevOps automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=detect-sudo,1d47ffc556e248dc) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-sudo)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *detect,sudo,access* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "detect sudo access" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=detect,sudo,access` - -`cm run script --tags=detect,sudo,access ` - -*or* - -`cmr "detect sudo access"` - -`cmr "detect sudo access " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'detect,sudo,access', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -</details>
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="detect,sudo,access"``` - -#### Run this script via Docker (beta) - -`cm docker script "detect sudo access" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-sudo/_cm.yaml) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-sudo/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-sudo/_cm.yaml) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-sudo/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-sudo/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-sudo/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-sudo/_cm.yaml) - -___ -### Script output -`cmr "detect sudo access " -j` -#### New environment keys (filter) - -* `CM_SUDO_*` -#### New environment keys auto-detected from customize - -* `CM_SUDO_USER` \ No newline at end of file diff --git a/docs/DevOps-automation/download-and-extract/index.md b/docs/DevOps-automation/download-and-extract/index.md deleted file mode 100644 index 1d802285e..000000000 --- a/docs/DevOps-automation/download-and-extract/index.md +++ /dev/null @@ -1,216 +0,0 @@ -Automatically generated README for this automation recipe: **download-and-extract** - -Category: **DevOps automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=download-and-extract,c67e81a4ce2649f5) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* 
CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-and-extract)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *download-and-extract,file* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "download-and-extract file" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=download-and-extract,file` - -`cm run script --tags=download-and-extract,file[,variations] [--input_flags]` - -*or* - -`cmr "download-and-extract file"` - -`cmr "download-and-extract file [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
-Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'download-and-extract,file', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="download-and-extract,file"``` - -#### Run this script via Docker (beta) - -`cm docker script "download-and-extract file[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_extract` - - Environment variables: - - *CM_DAE_EXTRACT_DOWNLOADED*: `yes` - - Workflow: - * `_keep` - - Environment variables: - - *CM_EXTRACT_REMOVE_EXTRACTED*: `no` - - Workflow: - * `_no-remove-extracted` - - Environment variables: - - *CM_EXTRACT_REMOVE_EXTRACTED*: `no` - - Workflow: - * `_url.#` - - Environment variables: - - *CM_DAE_URL*: `#` - - Workflow: - -
    - - - * Group "**download-tool**" -
    - Click here to expand this section. - - * **`_cmutil`** (default) - - Workflow: - * `_curl` - - Workflow: - * `_gdown` - - Workflow: - * `_rclone` - - Workflow: - * `_torrent` - - Environment variables: - - *CM_DAE_DOWNLOAD_USING_TORRENT*: `yes` - - *CM_TORRENT_DOWNLOADED_FILE_NAME*: `<<>>` - - *CM_TORRENT_DOWNLOADED_PATH_ENV_KEY*: `CM_DAE_FILEPATH` - - *CM_TORRENT_WAIT_UNTIL_COMPLETED*: `yes` - - Workflow: - 1. ***Read "prehook_deps" on other CM scripts*** - * download,torrent - - CM script: [download-torrent](https://github.com/mlcommons/cm4mlops/tree/master/script/download-torrent) - * `_wget` - - Workflow: - -
    - - -#### Default variations - -`_cmutil` - -#### Script flags mapped to environment -
-Click here to expand this section. - -* `--download_path=value` → `CM_DOWNLOAD_PATH=value` -* `--extra_folder=value` → `CM_EXTRACT_TO_FOLDER=value` -* `--extract_path=value` → `CM_EXTRACT_PATH=value` -* `--from=value` → `CM_DOWNLOAD_LOCAL_FILE_PATH=value` -* `--local_path=value` → `CM_DOWNLOAD_LOCAL_FILE_PATH=value` -* `--store=value` → `CM_DOWNLOAD_PATH=value` -* `--to=value` → `CM_EXTRACT_PATH=value` -* `--url=value` → `CM_DAE_URL=value` -* `--verify=value` → `CM_VERIFY_SSL=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "download_path":...}) -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
- -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-and-extract/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-and-extract/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-and-extract/_cm.json)*** - * download,file - * Skip this dependency only if all ENV vars are set:
    -`{'CM_DAE_DOWNLOAD_USING_TORRENT': ['yes', 'True']}` - * CM names: `--adr.['download-script']...` - - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) - 1. ***Run native script if exists*** - 1. ***Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-and-extract/_cm.json)*** - * extract,file - * Enable this dependency only if all ENV vars are set:
    -`{'CM_DAE_EXTRACT_DOWNLOADED': ['yes', 'True']}` - * CM names: `--adr.['extract-script']...` - - CM script: [extract-file](https://github.com/mlcommons/cm4mlops/tree/master/script/extract-file) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-and-extract/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-and-extract/_cm.json) - -___ -### Script output -`cmr "download-and-extract file [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `<<>>` -* `<<>>` -* `CM_DOWNLOAD_DOWNLOADED_PATH*` -* `CM_EXTRACT_EXTRACTED_PATH` -* `CM_GET_DEPENDENT_CACHED_PATH` -#### New environment keys auto-detected from customize - -* `CM_GET_DEPENDENT_CACHED_PATH` \ No newline at end of file diff --git a/docs/DevOps-automation/download-file/index.md b/docs/DevOps-automation/download-file/index.md deleted file mode 100644 index 73df26d09..000000000 --- a/docs/DevOps-automation/download-file/index.md +++ /dev/null @@ -1,202 +0,0 @@ -Automatically generated README for this automation recipe: **download-file** - -Category: **DevOps automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=download-file,9cdc8dc41aae437e) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *download,file* -* 
Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "download file" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=download,file` - -`cm run script --tags=download,file[,variations] [--input_flags]` - -*or* - -`cmr "download file"` - -`cmr "download file [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
-Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'download,file', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="download,file"``` - -#### Run this script via Docker (beta) - -`cm docker script "download file[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_url.#` - - Environment variables: - - *CM_DOWNLOAD_URL*: `#` - - Workflow: - -
    - - - * Group "**download-tool**" -
    - Click here to expand this section. - - * **`_cmutil`** (default) - - Environment variables: - - *CM_DOWNLOAD_TOOL*: `cmutil` - - Workflow: - * `_curl` - - Environment variables: - - *CM_DOWNLOAD_TOOL*: `curl` - - Workflow: - * `_gdown` - - Environment variables: - - *CM_DOWNLOAD_TOOL*: `gdown` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_package.gdown - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_rclone` - - Environment variables: - - *CM_DOWNLOAD_TOOL*: `rclone` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,rclone - - CM script: [get-rclone](https://github.com/mlcommons/cm4mlops/tree/master/script/get-rclone) - * `_wget` - - Environment variables: - - *CM_DOWNLOAD_TOOL*: `wget` - - Workflow: - -
    - - -#### Default variations - -`_cmutil` - -#### Script flags mapped to environment -
-Click here to expand this section. - -* `--download_path=value` → `CM_DOWNLOAD_PATH=value` -* `--from=value` → `CM_DOWNLOAD_LOCAL_FILE_PATH=value` -* `--local_path=value` → `CM_DOWNLOAD_LOCAL_FILE_PATH=value` -* `--md5sum=value` → `CM_DOWNLOAD_CHECKSUM=value` -* `--store=value` → `CM_DOWNLOAD_PATH=value` -* `--url=value` → `CM_DOWNLOAD_URL=value` -* `--verify=value` → `CM_VERIFY_SSL=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "download_path":...}) -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_RCLONE_COPY_USING: `sync` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-file/_cm.json) - -___ -### Script output -`cmr "download file [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `<<>>` -* `CM_DOWNLOAD_DOWNLOADED_PATH` -* `CM_GET_DEPENDENT_CACHED_PATH` -#### New environment keys auto-detected from customize - -* `CM_DOWNLOAD_DOWNLOADED_PATH` -* `CM_GET_DEPENDENT_CACHED_PATH` \ No newline at end of file diff --git a/docs/DevOps-automation/download-torrent/index.md b/docs/DevOps-automation/download-torrent/index.md deleted file mode 100644 index e14037e1d..000000000 --- a/docs/DevOps-automation/download-torrent/index.md +++ /dev/null @@ -1,155 +0,0 @@ -Automatically generated README for this automation recipe: **download-torrent** - -Category: **DevOps automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=download-torrent,69b752c5618e45bb) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-torrent)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *download,torrent,download-torrent* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "download torrent download-torrent" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=download,torrent,download-torrent` - -`cm run script --tags=download,torrent,download-torrent[,variations] [--input_flags]` - -*or* - -`cmr "download torrent download-torrent"` - -`cmr "download torrent download-torrent [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
-Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'download,torrent,download-torrent', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="download,torrent,download-torrent"``` - -#### Run this script via Docker (beta) - -`cm docker script "download torrent download-torrent[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_torrent.#` - - Environment variables: - - *CM_TORRENT_FILE*: `#` - - Workflow: - -
    - - -#### Script flags mapped to environment -
-Click here to expand this section. - -* `--wait=value` → `CM_TORRENT_WAIT_UNTIL_COMPLETED=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "wait":...}) -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_TORRENT_WAIT_UNTIL_COMPLETED: `no` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-torrent/_cm.json)*** - * get,generic-sys-util,_transmission - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-torrent/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-torrent/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-torrent/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-torrent/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-torrent/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/download-torrent/_cm.json) - -___ -### Script output -`cmr "download torrent download-torrent [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `<<>>` -* `CM_TORRENT_DOWNLOADED_PATH` -#### New environment keys auto-detected from customize - -* `CM_TORRENT_DOWNLOADED_PATH` \ No newline at end of file diff --git a/docs/DevOps-automation/extract-file/index.md b/docs/DevOps-automation/extract-file/index.md deleted file mode 100644 index a9df0d22e..000000000 --- a/docs/DevOps-automation/extract-file/index.md +++ /dev/null @@ -1,168 +0,0 @@ -Automatically generated README for this automation recipe: **extract-file** - -Category: **DevOps automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=extract-file,3f0b76219d004817) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *extract,file* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "extract file" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=extract,file` - -`cm run script --tags=extract,file[,variations] [--input_flags]` - -*or* - -`cmr "extract file"` - -`cmr "extract file [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
-Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'extract,file', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="extract,file"``` - -#### Run this script via Docker (beta) - -`cm docker script "extract file[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_keep` - - Environment variables: - - *CM_EXTRACT_REMOVE_EXTRACTED*: `no` - - Workflow: - * `_no-remove-extracted` - - Environment variables: - - *CM_EXTRACT_REMOVE_EXTRACTED*: `no` - - Workflow: - * `_path.#` - - Environment variables: - - *CM_EXTRACT_FILEPATH*: `#` - - Workflow: - -
    - - -#### Script flags mapped to environment -
-Click here to expand this section. - -* `--extra_folder=value` → `CM_EXTRACT_TO_FOLDER=value` -* `--extract_path=value` → `CM_EXTRACT_PATH=value` -* `--input=value` → `CM_EXTRACT_FILEPATH=value` -* `--to=value` → `CM_EXTRACT_PATH=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "extra_folder":...}) -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/extract-file/_cm.json) - -___ -### Script output -`cmr "extract file [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `<<>>` -* `CM_EXTRACT_EXTRACTED_PATH` -* `CM_GET_DEPENDENT_CACHED_PATH` -#### New environment keys auto-detected from customize - -* `CM_EXTRACT_EXTRACTED_PATH` -* `CM_GET_DEPENDENT_CACHED_PATH` \ No newline at end of file diff --git a/docs/DevOps-automation/fail/index.md b/docs/DevOps-automation/fail/index.md deleted file mode 100644 index 6784dbba7..000000000 --- a/docs/DevOps-automation/fail/index.md +++ /dev/null @@ -1,132 +0,0 @@ -Automatically generated README for this automation recipe: **fail** - -Category: **DevOps automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=fail,3aaee82e19d243cd) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/fail)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *fail,filter* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "fail filter" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=fail,filter` - -`cm run script --tags=fail,filter[,variations] ` - -*or* - -`cmr "fail filter"` - -`cmr "fail filter [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
-Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'fail,filter', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="fail,filter"``` - -#### Run this script via Docker (beta) - -`cm docker script "fail filter[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_windows` - - Environment variables: - - *CM_FAIL_WINDOWS*: `True` - - Workflow: - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/fail/_cm.yaml) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/fail/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/fail/_cm.yaml) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/fail/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/fail/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/fail/_cm.yaml) - -___ -### Script output -`cmr "fail filter [,variations]" -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/get-conda/index.md b/docs/DevOps-automation/get-conda/index.md deleted file mode 100644 index 6999e6a00..000000000 --- a/docs/DevOps-automation/get-conda/index.md +++ /dev/null @@ -1,164 +0,0 @@ -Automatically generated README for this automation recipe: **get-conda** - -Category: **DevOps automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-conda,6600115f41324c7b) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this 
script (see in above meta description): *get,conda,get-conda* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get conda get-conda" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,conda,get-conda` - -`cm run script --tags=get,conda,get-conda[,variations] ` - -*or* - -`cmr "get conda get-conda"` - -`cmr "get conda get-conda [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
-Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'get,conda,get-conda', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,conda,get-conda"``` - -#### Run this script via Docker (beta) - -`cm docker script "get conda get-conda[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_name.#` - - Environment variables: - - *CM_CONDA_PREFIX_NAME*: `#` - - Workflow: - -
    - - - * Group "**conda-python**" -
    - Click here to expand this section. - - * `_python-3.#` - - Environment variables: - - *CM_CONDA_PYTHON_VERSION*: `3.#` - - Workflow: - * `_python-3.8` - - Environment variables: - - *CM_CONDA_PYTHON_VERSION*: `3.8` - - Workflow: - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-conda/_cm.json) - -___ -### Script output -`cmr "get conda get-conda [,variations]" -j` -#### New environment keys (filter) - -* `+LD_LIBRARY_PATH` -* `+PATH` -* `CM_CONDA_BIN_PATH` -* `CM_CONDA_BIN_WITH_PATH` -* `CM_CONDA_LIB_PATH` -* `CM_CONDA_PREFIX` -* `CONDA_PREFIX` -#### New environment keys auto-detected from customize - -* `CM_CONDA_BIN_PATH` -* `CM_CONDA_BIN_WITH_PATH` -* `CM_CONDA_LIB_PATH` -* `CM_CONDA_PREFIX` \ No newline at end of file diff --git a/docs/DevOps-automation/get-git-repo/index.md b/docs/DevOps-automation/get-git-repo/index.md deleted file mode 100644 index 17ecb4b5b..000000000 --- a/docs/DevOps-automation/get-git-repo/index.md +++ /dev/null @@ -1,240 +0,0 @@ -Automatically generated README for this automation recipe: **get-git-repo** - -Category: **DevOps automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-git-repo,ed603e7292974f10) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,git,repo,repository,clone* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get git repo repository clone" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,git,repo,repository,clone` - -`cm run script --tags=get,git,repo,repository,clone[,variations] [--input_flags]` - -*or* - -`cmr "get git repo repository clone"` - -`cmr "get git repo repository clone [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,git,repo,repository,clone' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,git,repo,repository,clone"``` - -#### Run this script via Docker (beta) - -`cm docker script "get git repo repository clone[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_lfs` - - Environment variables: - - *CM_GIT_REPO_NEEDS_LFS*: `yes` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic,sys-util,_git-lfs - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * `_no-recurse-submodules` - - Environment variables: - - *CM_GIT_RECURSE_SUBMODULES*: `` - - Workflow: - * `_patch` - - Environment variables: - - *CM_GIT_PATCH*: `yes` - - Workflow: - * `_submodules.#` - - Environment variables: - - *CM_GIT_SUBMODULES*: `#` - - Workflow: - -
    - - - * Group "**checkout**" -
    - Click here to expand this section. - - * `_branch.#` - - Environment variables: - - *CM_GIT_BRANCH*: `#` - - Workflow: - * `_sha.#` - - Environment variables: - - *CM_GIT_SHA*: `#` - - Workflow: - * `_tag.#` - - Environment variables: - - *CM_GIT_CHECKOUT_TAG*: `#` - - Workflow: - -
    - - - * Group "**git-history**" -
    - Click here to expand this section. - - * `_full-history` - - Environment variables: - - *CM_GIT_DEPTH*: `` - - Workflow: - * **`_short-history`** (default) - - Environment variables: - - *CM_GIT_DEPTH*: `--depth 5` - - Workflow: - -
    - - - * Group "**repo**" -
    - Click here to expand this section. - - * `_repo.#` - - Environment variables: - - *CM_GIT_URL*: `#` - - Workflow: - -
    - - -#### Default variations - -`_short-history` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--branch=value` → `CM_GIT_CHECKOUT=value` -* `--depth=value` → `CM_GIT_DEPTH=value` -* `--env_key=value` → `CM_GIT_ENV_KEY=value` -* `--folder=value` → `CM_GIT_CHECKOUT_FOLDER=value` -* `--patch=value` → `CM_GIT_PATCH=value` -* `--submodules=value` → `CM_GIT_RECURSE_SUBMODULES=value` -* `--update=value` → `CM_GIT_REPO_PULL=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "branch":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_GIT_DEPTH: `--depth 4` -* CM_GIT_CHECKOUT_FOLDER: `repo` -* CM_GIT_PATCH: `no` -* CM_GIT_RECURSE_SUBMODULES: ` --recurse-submodules` -* CM_GIT_URL: `https://github.com/mlcommons/ck.git` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo/customize.py)*** - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-git-repo/_cm.json)*** - * pull,git,repo - * Enable this dependency only if all ENV vars are set:
    -`{'CM_GIT_REPO_PULL': ['yes', 'True']}` - * CM names: `--adr.['pull-git-repo']...` - - CM script: [pull-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/pull-git-repo) - -___ -### Script output -`cmr "get git repo repository clone [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `<<>>` -* `CM_GIT_CHECKOUT_PATH` -* `CM_GIT_REPO_*` -#### New environment keys auto-detected from customize - -* `CM_GIT_CHECKOUT_PATH` -* `CM_GIT_REPO_CURRENT_HASH` \ No newline at end of file diff --git a/docs/DevOps-automation/get-github-cli/index.md b/docs/DevOps-automation/get-github-cli/index.md deleted file mode 100644 index fee40cc4e..000000000 --- a/docs/DevOps-automation/get-github-cli/index.md +++ /dev/null @@ -1,120 +0,0 @@ -Automatically generated README for this automation recipe: **get-github-cli** - -Category: **DevOps automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-github-cli,1417029c6ff44f21) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,gh,gh-cli,github,cli,github-cli* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get gh gh-cli github cli github-cli" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,gh,gh-cli,github,cli,github-cli` - -`cm run script --tags=get,gh,gh-cli,github,cli,github-cli ` - -*or* - -`cmr "get gh gh-cli github cli github-cli"` - -`cmr "get gh gh-cli github cli github-cli " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,gh,gh-cli,github,cli,github-cli' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,gh,gh-cli,github,cli,github-cli"``` - -#### Run this script via Docker (beta) - -`cm docker script "get gh gh-cli github cli github-cli" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-github-cli/_cm.json) - -___ -### Script output -`cmr "get gh gh-cli github cli github-cli " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/index.md b/docs/DevOps-automation/index.md new file mode 100644 index 000000000..9162bc99e --- /dev/null +++ b/docs/DevOps-automation/index.md @@ -0,0 +1,24 @@ +The DevOps automation category contains the following scripts: + +- [benchmark-program](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/benchmark-program/README.md) +- [compile-program](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/compile-program/README.md) +- [convert-csv-to-md](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/convert-csv-to-md/README.md) +- [copy-to-clipboard](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/copy-to-clipboard/README.md) +- [create-conda-env](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/create-conda-env/README.md) +- [create-patch](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/create-patch/README.md) +- [detect-sudo](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/detect-sudo/README.md) +- [download-and-extract](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/download-and-extract/README.md) +- [download-file](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/download-file/README.md) +- [download-torrent](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/download-torrent/README.md) +- [extract-file](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/extract-file/README.md) +- [fail](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/fail/README.md) +- 
[get-conda](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-conda/README.md) +- [get-git-repo](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-git-repo/README.md) +- [get-github-cli](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-github-cli/README.md) +- [pull-git-repo](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/pull-git-repo/README.md) +- [push-csv-to-spreadsheet](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/push-csv-to-spreadsheet/README.md) +- [set-device-settings-qaic](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/set-device-settings-qaic/README.md) +- [set-echo-off-win](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/set-echo-off-win/README.md) +- [set-performance-mode](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/set-performance-mode/README.md) +- [set-sqlite-dir](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/set-sqlite-dir/README.md) +- [tar-my-folder](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/tar-my-folder/README.md) diff --git a/docs/DevOps-automation/pull-git-repo/index.md b/docs/DevOps-automation/pull-git-repo/index.md deleted file mode 100644 index 63b1e3157..000000000 --- a/docs/DevOps-automation/pull-git-repo/index.md +++ /dev/null @@ -1,134 +0,0 @@ -Automatically generated README for this automation recipe: **pull-git-repo** - -Category: **DevOps automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=pull-git-repo,c23132ed65c4421d) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory 
for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/pull-git-repo)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *pull,git,repo,repository* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "pull git repo repository" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=pull,git,repo,repository` - -`cm run script --tags=pull,git,repo,repository [--input_flags]` - -*or* - -`cmr "pull git repo repository"` - -`cmr "pull git repo repository " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'pull,git,repo,repository' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="pull,git,repo,repository"``` - -#### Run this script via Docker (beta) - -`cm docker script "pull git repo repository" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--path=value` → `CM_GIT_CHECKOUT_PATH=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "path":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/pull-git-repo/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/pull-git-repo/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/pull-git-repo/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/pull-git-repo/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/pull-git-repo/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/pull-git-repo/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/pull-git-repo/_cm.json) - -___ -### Script output -`cmr "pull git repo repository " [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/push-csv-to-spreadsheet/index.md b/docs/DevOps-automation/push-csv-to-spreadsheet/index.md deleted file mode 100644 index 124332bbc..000000000 --- a/docs/DevOps-automation/push-csv-to-spreadsheet/index.md +++ /dev/null @@ -1,142 +0,0 @@ -Automatically generated README for this automation recipe: **push-csv-to-spreadsheet** - -Category: **DevOps automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=push-csv-to-spreadsheet,5ec9e5fa7feb4fff) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-csv-to-spreadsheet)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *push,google-spreadsheet,spreadsheet,push-to-google-spreadsheet* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "push google-spreadsheet spreadsheet push-to-google-spreadsheet" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=push,google-spreadsheet,spreadsheet,push-to-google-spreadsheet` - -`cm run script --tags=push,google-spreadsheet,spreadsheet,push-to-google-spreadsheet [--input_flags]` - -*or* - -`cmr "push google-spreadsheet spreadsheet push-to-google-spreadsheet"` - -`cmr "push google-spreadsheet spreadsheet push-to-google-spreadsheet " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'push,google-spreadsheet,spreadsheet,push-to-google-spreadsheet' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="push,google-spreadsheet,spreadsheet,push-to-google-spreadsheet"``` - -#### Run this script via Docker (beta) - -`cm docker script "push google-spreadsheet spreadsheet push-to-google-spreadsheet" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--csv_file=value` → `CM_CSV_FILE_PATH=value` -* `--sheet_name=value` → `CM_GOOGLE_SHEET_NAME=value` -* `--spreadsheet_id=value` → `CM_GOOGLE_SPREADSHEET_ID=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "csv_file":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_GOOGLE_SPREADSHEET_ID: `1gMHjXmFmwZR4-waPPyxy5Pc3VARqX3kKUWxkP97Xa6Y` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-csv-to-spreadsheet/_cm.json)*** - * get,python3 - * CM names: `--adr.['python3', 'python']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,generic-python-lib,_google-api-python-client - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_google-auth-oauthlib - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-csv-to-spreadsheet/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-csv-to-spreadsheet/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-csv-to-spreadsheet/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-csv-to-spreadsheet/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-csv-to-spreadsheet/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-csv-to-spreadsheet/_cm.json) - -___ -### Script output -`cmr "push google-spreadsheet spreadsheet push-to-google-spreadsheet " [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/set-device-settings-qaic/index.md b/docs/DevOps-automation/set-device-settings-qaic/index.md deleted file mode 100644 index 2033b7695..000000000 --- a/docs/DevOps-automation/set-device-settings-qaic/index.md +++ /dev/null @@ -1,143 +0,0 @@ -Automatically generated README for this automation recipe: **set-device-settings-qaic** - -Category: **DevOps automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=set-device-settings-qaic,408a1a1563b44780) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-device-settings-qaic)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *set,device,qaic,ai100,cloud,performance,power,setting,mode,vc,ecc* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "set device qaic ai100 cloud performance power setting mode vc ecc" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=set,device,qaic,ai100,cloud,performance,power,setting,mode,vc,ecc` - -`cm run script --tags=set,device,qaic,ai100,cloud,performance,power,setting,mode,vc,ecc[,variations] ` - -*or* - -`cmr "set device qaic ai100 cloud performance power setting mode vc ecc"` - -`cmr "set device qaic ai100 cloud performance power setting mode vc ecc [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'set,device,qaic,ai100,cloud,performance,power,setting,mode,vc,ecc' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="set,device,qaic,ai100,cloud,performance,power,setting,mode,vc,ecc"``` - -#### Run this script via Docker (beta) - -`cm docker script "set device qaic ai100 cloud performance power setting mode vc ecc[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_ecc` - - Environment variables: - - *CM_QAIC_ECC*: `yes` - - Workflow: - * `_vc.#` - - Environment variables: - - *CM_QAIC_VC*: `#` - - Workflow: - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_QAIC_DEVICES: `0` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-device-settings-qaic/_cm.json)*** - * detect-os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,qaic,platform,sdk - - CM script: [get-qaic-platform-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-qaic-platform-sdk) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-device-settings-qaic/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-device-settings-qaic/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-device-settings-qaic/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-device-settings-qaic/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-device-settings-qaic/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-device-settings-qaic/_cm.json) - -___ -### Script output -`cmr "set device qaic ai100 cloud performance power setting mode vc ecc [,variations]" -j` -#### New environment keys (filter) - -* `CM_QAIC_DEVICE_*` -#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/set-echo-off-win/index.md b/docs/DevOps-automation/set-echo-off-win/index.md deleted file mode 100644 index 46e87495c..000000000 --- a/docs/DevOps-automation/set-echo-off-win/index.md +++ /dev/null @@ -1,116 +0,0 @@ -Automatically generated README for this automation recipe: **set-echo-off-win** - -Category: **DevOps automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=set-echo-off-win,49d94b57524f4fcf) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-echo-off-win)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *set,echo,off,win,echo-off-win,echo-off* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "set echo off win echo-off-win echo-off" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=set,echo,off,win,echo-off-win,echo-off` - -`cm run script --tags=set,echo,off,win,echo-off-win,echo-off ` - -*or* - -`cmr "set echo off win echo-off-win echo-off"` - -`cmr "set echo off win echo-off-win echo-off " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'set,echo,off,win,echo-off-win,echo-off' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="set,echo,off,win,echo-off-win,echo-off"``` - -#### Run this script via Docker (beta) - -`cm docker script "set echo off win echo-off-win echo-off" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-echo-off-win/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-echo-off-win/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-echo-off-win/_cm.json) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-echo-off-win/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-echo-off-win/_cm.json) - -___ -### Script output -`cmr "set echo off win echo-off-win echo-off " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/set-performance-mode/index.md b/docs/DevOps-automation/set-performance-mode/index.md deleted file mode 100644 index 0fd389fe2..000000000 --- a/docs/DevOps-automation/set-performance-mode/index.md +++ /dev/null @@ -1,180 +0,0 @@ -Automatically generated README for this automation recipe: **set-performance-mode** - -Category: **DevOps automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=set-performance-mode,2c0ab7b64692443d) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode)* -* CM meta description for this script: 
*[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *set,system,performance,power,mode* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "set system performance power mode" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=set,system,performance,power,mode` - -`cm run script --tags=set,system,performance,power,mode[,variations] ` - -*or* - -`cmr "set system performance power mode"` - -`cmr "set system performance power mode [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'set,system,performance,power,mode' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="set,system,performance,power,mode"``` - -#### Run this script via Docker (beta) - -`cm docker script "set system performance power mode[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_reproducibility` - - Environment variables: - - *CM_SET_OS_PERFORMANCE_REPRODUCIBILITY_MODE*: `yes` - - Workflow: - -
    - - - * Group "**device**" -
    - Click here to expand this section. - - * **`_cpu`** (default) - - Environment variables: - - *CM_SET_PERFORMANCE_MODE_OF*: `cpu` - - Workflow: - -
    - - - * Group "**performance-mode**" -
    - Click here to expand this section. - - * **`_performance`** (default) - - Environment variables: - - *CM_SET_PERFORMANCE_MODE*: `performance` - - Workflow: - -
    - - - * Group "**power**" -
    - Click here to expand this section. - - * `_power` - - Environment variables: - - *CM_SET_PERFORMANCE_MODE*: `power` - - Workflow: - -
    - - -#### Default variations - -`_cpu,_performance` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/_cm.json)*** - * detect-os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect-cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/_cm.json) - 1. ***Run native script if exists*** - * [run-ubuntu.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/run-ubuntu.sh) - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-performance-mode/_cm.json) - -___ -### Script output -`cmr "set system performance power mode [,variations]" -j` -#### New environment keys (filter) - -* `OMP_*` -#### New environment keys auto-detected from customize diff --git a/docs/DevOps-automation/set-sqlite-dir/index.md b/docs/DevOps-automation/set-sqlite-dir/index.md deleted file mode 100644 index ec6e14441..000000000 --- a/docs/DevOps-automation/set-sqlite-dir/index.md +++ /dev/null @@ -1,141 +0,0 @@ -Automatically generated README for this automation recipe: **set-sqlite-dir** - -Category: **DevOps automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=set-sqlite-dir,05904966355a43ac) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-sqlite-dir)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *set,sqlite,dir,sqlite-dir* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "set sqlite dir sqlite-dir" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=set,sqlite,dir,sqlite-dir` - -`cm run script --tags=set,sqlite,dir,sqlite-dir [--input_flags]` - -*or* - -`cmr "set sqlite dir sqlite-dir"` - -`cmr "set sqlite dir sqlite-dir " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'set,sqlite,dir,sqlite-dir' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="set,sqlite,dir,sqlite-dir"``` - -#### Run this script via Docker (beta) - -`cm docker script "set sqlite dir sqlite-dir" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--path=value` → `CM_SQLITE_PATH=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "path":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-sqlite-dir/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-sqlite-dir/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-sqlite-dir/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-sqlite-dir/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-sqlite-dir/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-sqlite-dir/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/set-sqlite-dir/_cm.json) - -___ -### Script output -`cmr "set sqlite dir sqlite-dir " [--input_flags] -j` -#### New environment keys (filter) - -* `CM_SQLITE_PATH` -#### New environment keys auto-detected from customize - -* `CM_SQLITE_PATH` \ No newline at end of file diff --git a/docs/DevOps-automation/tar-my-folder/index.md b/docs/DevOps-automation/tar-my-folder/index.md deleted file mode 100644 index 2b3c6bce5..000000000 --- a/docs/DevOps-automation/tar-my-folder/index.md +++ /dev/null @@ -1,133 +0,0 @@ -Automatically generated README for this automation recipe: **tar-my-folder** - -Category: **DevOps automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=tar-my-folder,3784212e986c456b) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/tar-my-folder)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *run,tar* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "run tar" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=run,tar` - -`cm run script --tags=run,tar [--input_flags]` - -*or* - -`cmr "run tar"` - -`cmr "run tar " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'run,tar' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="run,tar"``` - -#### Run this script via Docker (beta) - -`cm docker script "run tar" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--input_dir=value` → `CM_TAR_INPUT_DIR=value` -* `--outfile=value` → `CM_TAR_OUTFILE=value` -* `--output_dir=value` → `CM_TAR_OUTPUT_DIR=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "input_dir":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/tar-my-folder/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/tar-my-folder/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/tar-my-folder/_cm.json) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/tar-my-folder/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/tar-my-folder/_cm.json) - -___ -### Script output -`cmr "run tar " [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Docker-automation/build-docker-image/index.md b/docs/Docker-automation/build-docker-image/index.md deleted file mode 100644 index e9eecd61f..000000000 --- a/docs/Docker-automation/build-docker-image/index.md +++ /dev/null @@ -1,160 +0,0 @@ -Automatically generated README for this automation recipe: **build-docker-image** - -Category: **Docker automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=build-docker-image,2c3c4ba2413442e7) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image)* -* CM meta 
description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *build,docker,image,docker-image,dockerimage* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "build docker image docker-image dockerimage" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=build,docker,image,docker-image,dockerimage` - -`cm run script --tags=build,docker,image,docker-image,dockerimage [--input_flags]` - -*or* - -`cmr "build docker image docker-image dockerimage"` - -`cmr "build docker image docker-image dockerimage " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'build,docker,image,docker-image,dockerimage' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="build,docker,image,docker-image,dockerimage"``` - -#### Run this script via Docker (beta) - -`cm docker script "build docker image docker-image dockerimage" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--cache=value` → `CM_DOCKER_CACHE=value` -* `--cm_repo=value` → `CM_MLOPS_REPO=value` -* `--docker_os=value` → `CM_DOCKER_OS=value` -* `--docker_os_version=value` → `CM_DOCKER_OS_VERSION=value` -* `--dockerfile=value` → `CM_DOCKERFILE_WITH_PATH=value` -* `--gh_token=value` → `CM_GH_TOKEN=value` -* `--image_name=value` → `CM_DOCKER_IMAGE_NAME=value` -* `--image_repo=value` → `CM_DOCKER_IMAGE_REPO=value` -* `--image_tag=value` → `CM_DOCKER_IMAGE_TAG=value` -* `--post_run_cmds=value` → `CM_DOCKER_POST_RUN_COMMANDS=value` -* `--pre_run_cmds=value` → `CM_DOCKER_PRE_RUN_COMMANDS=value` -* `--push_image=value` → `CM_DOCKER_PUSH_IMAGE=value` -* `--real_run=value` → `CM_REAL_RUN=value` -* `--script_tags=value` → `CM_DOCKER_RUN_SCRIPT_TAGS=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "cache":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_DOCKER_IMAGE_REPO: `local` -* CM_DOCKER_IMAGE_TAG: `latest` - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image/_cm.yaml) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image/_cm.yaml)*** - * build,dockerfile - * Enable this dependency only if all ENV vars are set:
    -`{'CM_BUILD_DOCKERFILE': ['yes', '1']}` - - CM script: [build-dockerfile](https://github.com/mlcommons/cm4mlops/tree/master/script/build-dockerfile) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-docker-image/_cm.yaml) - -___ -### Script output -`cmr "build docker image docker-image dockerimage " [--input_flags] -j` -#### New environment keys (filter) - -* `CM_DOCKER_*` -#### New environment keys auto-detected from customize - -* `CM_DOCKER_BUILD_ARGS` -* `CM_DOCKER_BUILD_CMD` -* `CM_DOCKER_CACHE_ARG` -* `CM_DOCKER_IMAGE_NAME` -* `CM_DOCKER_IMAGE_REPO` -* `CM_DOCKER_IMAGE_TAG` \ No newline at end of file diff --git a/docs/Docker-automation/build-dockerfile/index.md b/docs/Docker-automation/build-dockerfile/index.md deleted file mode 100644 index 231cdc2f3..000000000 --- a/docs/Docker-automation/build-dockerfile/index.md +++ /dev/null @@ -1,186 +0,0 @@ -Automatically generated README for this automation recipe: **build-dockerfile** - -Category: **Docker automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=build-dockerfile,e66a7483230d4641) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub 
repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-dockerfile)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *build,dockerfile* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "build dockerfile" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=build,dockerfile` - -`cm run script --tags=build,dockerfile[,variations] [--input_flags]` - -*or* - -`cmr "build dockerfile"` - -`cmr "build dockerfile [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'build,dockerfile' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="build,dockerfile"``` - -#### Run this script via Docker (beta) - -`cm docker script "build dockerfile[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_slim` - - Environment variables: - - *CM_DOCKER_BUILD_SLIM*: `yes` - - Workflow: - -
    - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--build=value` → `CM_BUILD_DOCKER_IMAGE=value` -* `--cache=value` → `CM_DOCKER_CACHE=value` -* `--cm_repo=value` → `CM_MLOPS_REPO=value` -* `--cm_repo_flags=value` → `CM_DOCKER_ADD_FLAG_TO_CM_MLOPS_REPO=value` -* `--cm_repos=value` → `CM_DOCKER_EXTRA_CM_REPOS=value` -* `--comments=value` → `CM_DOCKER_RUN_COMMENTS=value` -* `--copy_files=value` → `CM_DOCKER_COPY_FILES=value` -* `--docker_base_image=value` → `CM_DOCKER_IMAGE_BASE=value` -* `--docker_os=value` → `CM_DOCKER_OS=value` -* `--docker_os_version=value` → `CM_DOCKER_OS_VERSION=value` -* `--extra_sys_deps=value` → `CM_DOCKER_EXTRA_SYS_DEPS=value` -* `--fake_docker_deps=value` → `CM_DOCKER_FAKE_DEPS=value` -* `--fake_run_option=value` → `CM_DOCKER_FAKE_RUN_OPTION=value` -* `--file_path=value` → `CM_DOCKERFILE_WITH_PATH=value` -* `--gh_token=value` → `CM_GH_TOKEN=value` -* `--image_repo=value` → `CM_DOCKER_IMAGE_REPO=value` -* `--image_tag=value` → `CM_DOCKER_IMAGE_TAG=value` -* `--package_manager_update_cmd=value` → `CM_PACKAGE_MANAGER_UPDATE_CMD=value` -* `--pip_extra_flags=value` → `CM_DOCKER_PIP_INSTALL_EXTRA_FLAGS=value` -* `--post_file=value` → `DOCKER_IMAGE_POST_FILE=value` -* `--post_run_cmds=value` → `CM_DOCKER_POST_RUN_COMMANDS=value` -* `--pre_run_cmds=value` → `CM_DOCKER_PRE_RUN_COMMANDS=value` -* `--push_image=value` → `CM_DOCKER_PUSH_IMAGE=value` -* `--real_run=value` → `CM_REAL_RUN=value` -* `--run_cmd=value` → `CM_DOCKER_RUN_CMD=value` -* `--run_cmd_extra=value` → `CM_DOCKER_RUN_CMD_EXTRA=value` -* `--script_tags=value` → `CM_DOCKER_RUN_SCRIPT_TAGS=value` -* `--skip_cm_sys_upgrade=value` → `CM_DOCKER_SKIP_CM_SYS_UPGRADE=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "build":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_DOCKER_BUILD_SLIM: `no` -* CM_DOCKER_IMAGE_EOL: ` -` -* CM_DOCKER_OS: `ubuntu` - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-dockerfile/_cm.yaml) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-dockerfile/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-dockerfile/_cm.yaml) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-dockerfile/_cm.yaml) - 1. Run "postrocess" function from customize.py - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-dockerfile/_cm.yaml)*** - * build,docker,image - * Enable this dependency only if all ENV vars are set:
    -`{'CM_BUILD_DOCKER_IMAGE': ['yes', '1']}` - * CM names: `--adr.['build-docker-image']...` - - CM script: [build-docker-image](https://github.com/mlcommons/cm4mlops/tree/master/script/build-docker-image) - -___ -### Script output -`cmr "build dockerfile [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_DOCKERFILE_*` -#### New environment keys auto-detected from customize - -* `CM_DOCKERFILE_WITH_PATH` \ No newline at end of file diff --git a/docs/Docker-automation/index.md b/docs/Docker-automation/index.md new file mode 100644 index 000000000..477716d36 --- /dev/null +++ b/docs/Docker-automation/index.md @@ -0,0 +1,6 @@ +The Docker automation category contains the following scripts: + +- [build-docker-image](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/build-docker-image/README.md) +- [build-dockerfile](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/build-dockerfile/README.md) +- [prune-docker](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/prune-docker/README.md) +- [run-docker-container](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/run-docker-container/README.md) diff --git a/docs/Docker-automation/prune-docker/index.md b/docs/Docker-automation/prune-docker/index.md deleted file mode 100644 index 513cc894b..000000000 --- a/docs/Docker-automation/prune-docker/index.md +++ /dev/null @@ -1,118 +0,0 @@ -Automatically generated README for this automation recipe: **prune-docker** - -Category: **Docker automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=prune-docker,27ead88809bb4d4e) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* 
-* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-docker)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *prune,docker* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "prune docker" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=prune,docker` - -`cm run script --tags=prune,docker ` - -*or* - -`cmr "prune docker"` - -`cmr "prune docker " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'prune,docker' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="prune,docker"``` - -#### Run this script via Docker (beta) - -`cm docker script "prune docker" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-docker/_cm.json) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-docker/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-docker/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-docker/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-docker/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prune-docker/_cm.json) - -___ -### Script output -`cmr "prune docker " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Docker-automation/run-docker-container/index.md b/docs/Docker-automation/run-docker-container/index.md deleted file mode 100644 index 312e03f9f..000000000 --- a/docs/Docker-automation/run-docker-container/index.md +++ /dev/null @@ -1,166 +0,0 @@ -Automatically generated README for this automation recipe: **run-docker-container** - -Category: **Docker automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-docker-container,1e0c884107514b46) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-docker-container)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *run,docker,container* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "run docker container" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=run,docker,container` - -`cm run script --tags=run,docker,container [--input_flags]` - -*or* - -`cmr "run docker container"` - -`cmr "run docker container " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'run,docker,container' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="run,docker,container"``` - -#### Run this script via Docker (beta) - -`cm docker script "run docker container" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--all_gpus=value` → `CM_DOCKER_ADD_ALL_GPUS=value` -* `--base=value` → `CM_DOCKER_IMAGE_BASE=value` -* `--cache=value` → `CM_DOCKER_CACHE=value` -* `--cm_repo=value` → `CM_MLOPS_REPO=value` -* `--detached=value` → `CM_DOCKER_DETACHED_MODE=value` -* `--device=value` → `CM_DOCKER_ADD_DEVICE=value` -* `--docker_image_base=value` → `CM_DOCKER_IMAGE_BASE=value` -* `--docker_os=value` → `CM_DOCKER_OS=value` -* `--docker_os_version=value` → `CM_DOCKER_OS_VERSION=value` -* `--extra_run_args=value` → `CM_DOCKER_EXTRA_RUN_ARGS=value` -* `--fake_run_option=value` → `CM_DOCKER_FAKE_RUN_OPTION=value` -* `--gh_token=value` → `CM_GH_TOKEN=value` -* `--image_name=value` → `CM_DOCKER_IMAGE_NAME=value` -* `--image_repo=value` → `CM_DOCKER_IMAGE_REPO=value` -* `--image_tag=value` → `CM_DOCKER_IMAGE_TAG=value` -* `--image_tag_extra=value` → `CM_DOCKER_IMAGE_TAG_EXTRA=value` -* `--interactive=value` → `CM_DOCKER_INTERACTIVE_MODE=value` -* `--it=value` → `CM_DOCKER_INTERACTIVE=value` -* `--mounts=value` → `CM_DOCKER_VOLUME_MOUNTS=value` -* `--pass_user_group=value` → `CM_DOCKER_PASS_USER_GROUP=value` -* `--port_maps=value` → `CM_DOCKER_PORT_MAPS=value` -* `--post_run_cmds=value` → `CM_DOCKER_POST_RUN_COMMANDS=value` -* `--pre_run_cmds=value` → `CM_DOCKER_PRE_RUN_COMMANDS=value` -* `--real_run=value` → `CM_REAL_RUN=value` -* `--recreate=value` → `CM_DOCKER_IMAGE_RECREATE=value` -* `--run_cmd=value` → `CM_DOCKER_RUN_CMD=value` -* `--run_cmd_extra=value` → `CM_DOCKER_RUN_CMD_EXTRA=value` -* `--save_script=value` → `CM_DOCKER_SAVE_SCRIPT=value` -* `--script_tags=value` → `CM_DOCKER_RUN_SCRIPT_TAGS=value` -* `--shm_size=value` → `CM_DOCKER_SHM_SIZE=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "all_gpus":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_DOCKER_DETACHED_MODE: `yes` - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-docker-container/_cm.yaml) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-docker-container/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-docker-container/_cm.yaml)*** - * build,docker,image - * Skip this dependenecy only if any of ENV vars are set:
    -`{'CM_DOCKER_IMAGE_EXISTS': ['yes'], 'CM_DOCKER_SKIP_BUILD': ['yes']}` - * CM names: `--adr.['build-docker-image']...` - - CM script: [build-docker-image](https://github.com/mlcommons/cm4mlops/tree/master/script/build-docker-image) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-docker-container/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-docker-container/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-docker-container/_cm.yaml) - -___ -### Script output -`cmr "run docker container " [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/GUI/gui/index.md b/docs/GUI/gui/index.md deleted file mode 100644 index c68754f51..000000000 --- a/docs/GUI/gui/index.md +++ /dev/null @@ -1,243 +0,0 @@ -Automatically generated README for this automation recipe: **gui** - -Category: **GUI** - -License: **Apache 2.0** - -Developers: [Grigori Fursin](https://cKnowledge.org/gfursin) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=gui,605cac42514a4c69) ]* - ---- - -This CM script provides a unified GUI to run CM scripts using [Streamlit library](https://streamlit.io). - -If you want to run it in a cloud (Azure, AWS, GCP), you need to open some port and test that you can reach it from outside. 
- -By default, streamlit uses port 8501 but you can change it as follows: - -```bash -cm run script "cm gui" --port 80 -``` - -If you have troubles accessing this port, use this simple python module to test if your port is open: -```bash -python3 -m http.server 80 -``` - - - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/gui)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *cm,gui,cm-gui,script-gui,cm-script-gui,streamlit* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "cm gui cm-gui script-gui cm-script-gui streamlit" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=cm,gui,cm-gui,script-gui,cm-script-gui,streamlit` - -`cm run script --tags=cm,gui,cm-gui,script-gui,cm-script-gui,streamlit[,variations] [--input_flags]` - -*or* - -`cmr "cm gui cm-gui script-gui cm-script-gui streamlit"` - -`cmr "cm gui cm-gui script-gui cm-script-gui streamlit [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - - -#### Input Flags - -* --**script**=script tags -* --**app**=gui app - 
-**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "script":...} -``` -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'cm,gui,cm-gui,script-gui,cm-script-gui,streamlit' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="cm,gui,cm-gui,script-gui,cm-script-gui,streamlit"``` - -#### Run this script via Docker (beta) - -`cm docker script "cm gui cm-gui script-gui cm-script-gui streamlit[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * Group "**app**" -
    - Click here to expand this section. - - * `_chatgpt` - - Environment variables: - - *CM_GUI_APP*: `chatgpt` - - Workflow: - * `_graph` - - Environment variables: - - *CM_GUI_APP*: `graph` - - Workflow: - 1. ***Read "prehook_deps" on other CM scripts*** - * get,generic-python-lib,_matplotlib - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_mpld3 - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_main` - - Environment variables: - - *CM_GUI_APP*: `app` - - Workflow: - * `_playground` - - Environment variables: - - *CM_GUI_APP*: `playground` - - Workflow: - 1. ***Read "prehook_deps" on other CM scripts*** - * get,generic-python-lib,_matplotlib - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_mpld3 - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_streamlit_option_menu - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_numpy - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_pandas - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.plotly - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.streamlit-aggrid - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - -
    - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--address=value` → `CM_GUI_ADDRESS=value` -* `--app=value` → `CM_GUI_APP=value` -* `--exp_key_c=value` → `CM_GUI_GRAPH_EXPERIMENT_AXIS_KEY_C=value` -* `--exp_key_s=value` → `CM_GUI_GRAPH_EXPERIMENT_AXIS_KEY_S=value` -* `--exp_key_x=value` → `CM_GUI_GRAPH_EXPERIMENT_AXIS_KEY_X=value` -* `--exp_key_y=value` → `CM_GUI_GRAPH_EXPERIMENT_AXIS_KEY_Y=value` -* `--exp_max_results=value` → `CM_GUI_GRAPH_EXPERIMENT_MAX_RESULTS=value` -* `--exp_name=value` → `CM_GUI_GRAPH_EXPERIMENT_NAME=value` -* `--exp_tags=value` → `CM_GUI_GRAPH_EXPERIMENT_TAGS=value` -* `--exp_title=value` → `CM_GUI_GRAPH_EXPERIMENT_TITLE=value` -* `--exp_uid=value` → `CM_GUI_GRAPH_EXPERIMENT_RESULT_UID=value` -* `--no_browser=value` → `CM_GUI_NO_BROWSER=value` -* `--no_run=value` → `CM_GUI_NO_RUN=value` -* `--port=value` → `CM_GUI_PORT=value` -* `--prefix=value` → `CM_GUI_SCRIPT_PREFIX_LINUX=value` -* `--script=value` → `CM_GUI_SCRIPT_TAGS=value` -* `--title=value` → `CM_GUI_TITLE=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "address":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_GUI_EXTRA_CMD: `` -* CM_GUI_SCRIPT_PREFIX_LINUX: `gnome-terminal --` -* CM_GUI_APP: `app` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/gui/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,python - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,generic-python-lib,_cmind - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_streamlit - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/gui/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/gui/_cm.yaml) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/gui/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/gui/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/gui/_cm.yaml) - 1. Run "postrocess" function from customize.py - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/gui/_cm.yaml) - -___ -### Script output -`cmr "cm gui cm-gui script-gui cm-script-gui streamlit [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/GUI/index.md b/docs/GUI/index.md new file mode 100644 index 000000000..255e21d92 --- /dev/null +++ b/docs/GUI/index.md @@ -0,0 +1,3 @@ +The GUI category contains the following scripts: + +- [gui](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/gui/README.md) diff --git a/docs/Legacy-CK-support/get-ck-repo-mlops/index.md b/docs/Legacy-CK-support/get-ck-repo-mlops/index.md deleted file mode 100644 index 6977a1460..000000000 --- a/docs/Legacy-CK-support/get-ck-repo-mlops/index.md +++ /dev/null @@ -1,120 +0,0 @@ -Automatically generated README for this automation recipe: **get-ck-repo-mlops** - -Category: **Legacy CK support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ck-repo-mlops,d3a619b8186e4f74) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck-repo-mlops)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,ck-repo,mlops,ck-repo-mlops* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get ck-repo mlops ck-repo-mlops" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,ck-repo,mlops,ck-repo-mlops` - -`cm run script --tags=get,ck-repo,mlops,ck-repo-mlops ` - -*or* - -`cmr "get ck-repo mlops ck-repo-mlops"` - -`cmr "get ck-repo mlops ck-repo-mlops " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,ck-repo,mlops,ck-repo-mlops' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,ck-repo,mlops,ck-repo-mlops"``` - -#### Run this script via Docker (beta) - -`cm docker script "get ck-repo mlops ck-repo-mlops" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck-repo-mlops/_cm.json)*** - * get,ck - - CM script: [get-ck](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ck) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck-repo-mlops/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck-repo-mlops/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck-repo-mlops/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck-repo-mlops/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck-repo-mlops/_cm.json) - -___ -### Script output -`cmr "get ck-repo mlops ck-repo-mlops " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Legacy-CK-support/get-ck/index.md b/docs/Legacy-CK-support/get-ck/index.md deleted file mode 100644 index 29a213ab1..000000000 --- a/docs/Legacy-CK-support/get-ck/index.md +++ /dev/null @@ -1,118 +0,0 @@ -Automatically generated README for this automation recipe: **get-ck** - -Category: **Legacy CK support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ck,5575126797174cac) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,ck,ck-framework* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get ck ck-framework" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,ck,ck-framework` - -`cm run script --tags=get,ck,ck-framework ` - -*or* - -`cmr "get ck ck-framework"` - -`cmr "get ck ck-framework " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,ck,ck-framework' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,ck,ck-framework"``` - -#### Run this script via Docker (beta) - -`cm docker script "get ck ck-framework" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck/_cm.json) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ck/_cm.json) - -___ -### Script output -`cmr "get ck ck-framework " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Legacy-CK-support/index.md b/docs/Legacy-CK-support/index.md new file mode 100644 index 000000000..37d68f777 --- /dev/null +++ b/docs/Legacy-CK-support/index.md @@ -0,0 +1,4 @@ +The Legacy CK support category contains the following scripts: + +- [get-ck](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-ck/README.md) +- [get-ck-repo-mlops](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-ck-repo-mlops/README.md) diff --git a/docs/MLPerf-benchmark-support/add-custom-nvidia-system/index.md b/docs/MLPerf-benchmark-support/add-custom-nvidia-system/index.md deleted file mode 100644 index 64bf4f9e6..000000000 --- a/docs/MLPerf-benchmark-support/add-custom-nvidia-system/index.md +++ /dev/null @@ -1,175 +0,0 @@ -Automatically generated README for this automation recipe: **add-custom-nvidia-system** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on 
Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=add-custom-nvidia-system,b2e6c46c6e8745a3) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/add-custom-nvidia-system)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *add,custom,system,nvidia* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "add custom system nvidia" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=add,custom,system,nvidia` - -`cm run script --tags=add,custom,system,nvidia[,variations] ` - -*or* - -`cmr "add custom system nvidia"` - -`cmr "add custom system nvidia [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'add,custom,system,nvidia' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="add,custom,system,nvidia"``` - -#### Run this script via Docker (beta) - -`cm docker script "add custom system nvidia[variations]" ` - -___ -### Customization - - -#### Variations - - * Group "**code**" -
    - Click here to expand this section. - - * `_ctuning` - - Workflow: - * `_custom` - - Workflow: - * `_mlcommons` - - Workflow: - * `_nvidia-only` - - Workflow: - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -* `r2.1` -* `r3.0` -* `r3.1` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/add-custom-nvidia-system/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,cuda,_cudnn - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * get,tensorrt - - CM script: [get-tensorrt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tensorrt) - * get,cmake - - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) - * get,generic-python-lib,_requests - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic,sys-util,_glog-dev - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,generic,sys-util,_gflags-dev - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,generic,sys-util,_libre2-dev - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,generic,sys-util,_libnuma-dev - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,generic,sys-util,_libboost-all-dev - - CM script: 
[get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,generic,sys-util,_rapidjson-dev - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,nvidia,mlperf,inference,common-code - * CM names: `--adr.['nvidia-inference-common-code']...` - - CM script: [get-mlperf-inference-nvidia-common-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-common-code) - * get,generic-python-lib,_pycuda - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/add-custom-nvidia-system/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/add-custom-nvidia-system/_cm.yaml) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/add-custom-nvidia-system/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/add-custom-nvidia-system/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/add-custom-nvidia-system/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/add-custom-nvidia-system/_cm.yaml) - -___ -### Script output -`cmr "add custom system nvidia [,variations]" -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/benchmark-any-mlperf-inference-implementation/index.md b/docs/MLPerf-benchmark-support/benchmark-any-mlperf-inference-implementation/index.md deleted file mode 100644 index d84308de4..000000000 --- a/docs/MLPerf-benchmark-support/benchmark-any-mlperf-inference-implementation/index.md +++ /dev/null @@ -1,268 +0,0 @@ -Automatically generated README for this automation recipe: **benchmark-any-mlperf-inference-implementation** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=benchmark-any-mlperf-inference-implementation,8d3cd46f54464810) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-any-mlperf-inference-implementation)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *benchmark,run,natively,all,inference,any,mlperf,mlperf-implementation,implementation,mlperf-models* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "benchmark run natively all inference any mlperf mlperf-implementation implementation mlperf-models" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=benchmark,run,natively,all,inference,any,mlperf,mlperf-implementation,implementation,mlperf-models` - -`cm run script --tags=benchmark,run,natively,all,inference,any,mlperf,mlperf-implementation,implementation,mlperf-models[,variations] [--input_flags]` - -*or* - -`cmr "benchmark run natively all inference any mlperf mlperf-implementation implementation mlperf-models"` - -`cmr "benchmark run natively all inference any mlperf mlperf-implementation implementation mlperf-models [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'benchmark,run,natively,all,inference,any,mlperf,mlperf-implementation,implementation,mlperf-models' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="benchmark,run,natively,all,inference,any,mlperf,mlperf-implementation,implementation,mlperf-models"``` - -#### Run this script via Docker (beta) - -`cm docker script "benchmark run natively all inference any mlperf mlperf-implementation implementation mlperf-models[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_aws-dl2q.24xlarge,qualcomm` - - Workflow: - * `_mini,power` - - Workflow: - * `_orin,power` - - Workflow: - * `_phoenix,nvidia` - - Workflow: - * `_phoenix,power` - - Workflow: - * `_phoenix,reference` - - Workflow: - * `_rb6,power` - - Workflow: - * `_rb6,qualcomm` - - Workflow: - * `_rpi4,power` - - Workflow: - * `_sapphire-rapids.24c,nvidia` - - Workflow: - -
    - - - * Group "**implementation**" -
    - Click here to expand this section. - - * `_deepsparse` - - Environment variables: - - *DIVISION*: `open` - - *IMPLEMENTATION*: `deepsparse` - - Workflow: - * `_intel` - - Environment variables: - - *IMPLEMENTATION*: `intel` - - Workflow: - * `_mil` - - Environment variables: - - *IMPLEMENTATION*: `mil` - - Workflow: - * `_nvidia` - - Environment variables: - - *IMPLEMENTATION*: `nvidia-original` - - Workflow: - * `_qualcomm` - - Environment variables: - - *IMPLEMENTATION*: `qualcomm` - - Workflow: - * `_reference` - - Environment variables: - - *IMPLEMENTATION*: `reference` - - Workflow: - * `_tflite-cpp` - - Environment variables: - - *IMPLEMENTATION*: `tflite_cpp` - - Workflow: - -
    - - - * Group "**power**" -
    - Click here to expand this section. - - * **`_performance-only`** (default) - - Workflow: - * `_power` - - Environment variables: - - *POWER*: `True` - - Workflow: - -
    - - - * Group "**sut**" -
    - Click here to expand this section. - - * `_aws-dl2q.24xlarge` - - Workflow: - * `_macbookpro-m1` - - Environment variables: - - *CATEGORY*: `edge` - - *DIVISION*: `closed` - - Workflow: - * `_mini` - - Workflow: - * `_orin` - - Workflow: - * `_orin.32g` - - Environment variables: - - *CATEGORY*: `edge` - - *DIVISION*: `closed` - - Workflow: - * `_phoenix` - - Environment variables: - - *CATEGORY*: `edge` - - *DIVISION*: `closed` - - Workflow: - * `_rb6` - - Workflow: - * `_rpi4` - - Workflow: - * `_sapphire-rapids.24c` - - Environment variables: - - *CATEGORY*: `edge` - - *DIVISION*: `closed` - - Workflow: - -
    - - -#### Default variations - -`_performance-only` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--backends=value` → `BACKENDS=value` -* `--category=value` → `CATEGORY=value` -* `--devices=value` → `DEVICES=value` -* `--division=value` → `DIVISION=value` -* `--extra_args=value` → `EXTRA_ARGS=value` -* `--models=value` → `MODELS=value` -* `--power_server=value` → `POWER_SERVER=value` -* `--power_server_port=value` → `POWER_SERVER_PORT=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "backends":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* DIVISION: `open` -* CATEGORY: `edge` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-any-mlperf-inference-implementation/_cm.yaml)*** - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-any-mlperf-inference-implementation/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-any-mlperf-inference-implementation/_cm.yaml) - 1. ***Run native script if exists*** - * [run-template.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-any-mlperf-inference-implementation/run-template.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-any-mlperf-inference-implementation/_cm.yaml) - 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-any-mlperf-inference-implementation/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-any-mlperf-inference-implementation/_cm.yaml) - -___ -### Script output -`cmr "benchmark run natively all inference any mlperf mlperf-implementation implementation mlperf-models [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/build-mlperf-inference-server-nvidia/index.md b/docs/MLPerf-benchmark-support/build-mlperf-inference-server-nvidia/index.md deleted file mode 100644 index d97de8054..000000000 --- a/docs/MLPerf-benchmark-support/build-mlperf-inference-server-nvidia/index.md +++ /dev/null @@ -1,248 +0,0 @@ -Automatically generated README for this automation recipe: **build-mlperf-inference-server-nvidia** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=build-mlperf-inference-server-nvidia,f37403af5e9f4541) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-mlperf-inference-server-nvidia)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *build,mlcommons,mlperf,inference,inference-server,server,nvidia-harness,nvidia* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "build mlcommons mlperf inference inference-server server nvidia-harness nvidia" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=build,mlcommons,mlperf,inference,inference-server,server,nvidia-harness,nvidia` - -`cm run script --tags=build,mlcommons,mlperf,inference,inference-server,server,nvidia-harness,nvidia[,variations] [--input_flags]` - -*or* - -`cmr "build mlcommons mlperf inference inference-server server nvidia-harness nvidia"` - -`cmr "build mlcommons mlperf inference inference-server server nvidia-harness nvidia [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'build,mlcommons,mlperf,inference,inference-server,server,nvidia-harness,nvidia', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="build,mlcommons,mlperf,inference,inference-server,server,nvidia-harness,nvidia"``` - -#### Run this script via Docker (beta) - -`cm docker script "build mlcommons mlperf inference inference-server server nvidia-harness nvidia[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * Group "**code**" -
    - Click here to expand this section. - - * **`_ctuning`** (default) - - Workflow: - * `_custom` - - Workflow: - * `_mlcommons` - - Workflow: - * `_nvidia-only` - - Workflow: - -
    - - - * Group "**device**" -
    - Click here to expand this section. - - * `_cpu` - - Environment variables: - - *CM_MLPERF_DEVICE*: `cpu` - - Workflow: - * **`_cuda`** (default) - - Environment variables: - - *CM_MLPERF_DEVICE*: `cuda` - - *CM_MLPERF_DEVICE_LIB_NAMESPEC*: `cudart` - - Workflow: - * `_inferentia` - - Environment variables: - - *CM_MLPERF_DEVICE*: `inferentia` - - Workflow: - -
    - - -#### Default variations - -`_ctuning,_cuda` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--clean=value` → `CM_MAKE_CLEAN=value` -* `--custom_system=value` → `CM_CUSTOM_SYSTEM_NVIDIA=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "clean":...}) -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_MAKE_BUILD_COMMAND: `build` -* CM_MAKE_CLEAN: `no` -* CM_CUSTOM_SYSTEM_NVIDIA: `yes` - -
    - -#### Versions -Default version: `r3.1` - -* `r2.1` -* `r3.0` -* `r3.1` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-mlperf-inference-server-nvidia/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,cuda,_cudnn - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_DEVICE': ['cuda', 'inferentia']}` - * CM names: `--adr.['cuda']...` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * get,tensorrt,_dev - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_DEVICE': ['cuda', 'inferentia']}` - * Skip this dependency only if all ENV vars are set:
    -`{'CM_TENSORRT_SYSTEM_DETECT': [True]}` - * CM names: `--adr.['tensorrt']...` - - CM script: [get-tensorrt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tensorrt) - * get,gcc - - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) - * get,cmake - - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) - * get,generic,sys-util,_glog-dev - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,generic,sys-util,_gflags-dev - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,generic,sys-util,_libgmock-dev - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,generic,sys-util,_libre2-dev - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,generic,sys-util,_libnuma-dev - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,generic,sys-util,_libboost-all-dev - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,generic,sys-util,_rapidjson-dev - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,nvidia,mlperf,inference,common-code - * CM names: `--adr.['nvidia-inference-common-code']...` - - CM script: [get-mlperf-inference-nvidia-common-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-common-code) - * get,generic-python-lib,_package.pybind11 - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_pycuda - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_RUN_STATE_DOCKER': ['yes', True, 'True']}` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_opencv-python - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_nvidia-dali - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,mlperf,inference,nvidia,scratch,space - * CM names: `--adr.['nvidia-scratch-space']...` - - CM script: [get-mlperf-inference-nvidia-scratch-space](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-scratch-space) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-mlperf-inference-server-nvidia/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-mlperf-inference-server-nvidia/_cm.yaml) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-mlperf-inference-server-nvidia/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-mlperf-inference-server-nvidia/_cm.yaml) - 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-mlperf-inference-server-nvidia/customize.py)*** - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/build-mlperf-inference-server-nvidia/_cm.yaml)*** - * add,custom,system,nvidia - * Skip this dependency only if all ENV vars are set:
    -`{'CM_CUSTOM_SYSTEM_NVIDIA': ['no', False, 'False']}` - * CM names: `--adr.['custom-system-nvidia', 'nvidia-inference-common-code']...` - - CM script: [add-custom-nvidia-system](https://github.com/mlcommons/cm4mlops/tree/master/script/add-custom-nvidia-system) - -___ -### Script output -`cmr "build mlcommons mlperf inference inference-server server nvidia-harness nvidia [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_MLPERF_INFERENCE_NVIDIA_CODE_PATH` -#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/generate-mlperf-inference-submission/index.md b/docs/MLPerf-benchmark-support/generate-mlperf-inference-submission/index.md deleted file mode 100644 index e3a085fab..000000000 --- a/docs/MLPerf-benchmark-support/generate-mlperf-inference-submission/index.md +++ /dev/null @@ -1,191 +0,0 @@ -Automatically generated README for this automation recipe: **generate-mlperf-inference-submission** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=generate-mlperf-inference-submission,5f8ab2d0b5874d53) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-submission)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *generate,submission,mlperf,mlperf-inference,inference,mlcommons,inference-submission,mlperf-inference-submission,mlcommons-inference-submission* -* Output 
cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "generate submission mlperf mlperf-inference inference mlcommons inference-submission mlperf-inference-submission mlcommons-inference-submission" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=generate,submission,mlperf,mlperf-inference,inference,mlcommons,inference-submission,mlperf-inference-submission,mlcommons-inference-submission` - -`cm run script --tags=generate,submission,mlperf,mlperf-inference,inference,mlcommons,inference-submission,mlperf-inference-submission,mlcommons-inference-submission [--input_flags]` - -*or* - -`cmr "generate submission mlperf mlperf-inference inference mlcommons inference-submission mlperf-inference-submission mlcommons-inference-submission"` - -`cmr "generate submission mlperf mlperf-inference inference mlcommons inference-submission mlperf-inference-submission mlcommons-inference-submission " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'generate,submission,mlperf,mlperf-inference,inference,mlcommons,inference-submission,mlperf-inference-submission,mlcommons-inference-submission', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="generate,submission,mlperf,mlperf-inference,inference,mlcommons,inference-submission,mlperf-inference-submission,mlcommons-inference-submission"``` - -#### Run this script via Docker (beta) - -`cm docker script "generate submission mlperf mlperf-inference inference mlcommons inference-submission mlperf-inference-submission mlcommons-inference-submission" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--analyzer_settings_file=value` → `CM_MLPERF_POWER_ANALYZER_SETTINGS_FILE_PATH=value` -* `--category=value` → `CM_MLPERF_SUBMISSION_CATEGORY=value` -* `--clean=value` → `CM_MLPERF_CLEAN_SUBMISSION_DIR=value` -* `--dashboard=value` → `CM_MLPERF_DASHBOARD=value` -* `--dashboard_wb_project=value` → `CM_MLPERF_DASHBOARD_WANDB_PROJECT=value` -* `--device=value` → `CM_MLPERF_DEVICE=value` -* `--division=value` → `CM_MLPERF_SUBMISSION_DIVISION=value` -* `--duplicate=value` → `CM_MLPERF_DUPLICATE_SCENARIO_RESULTS=value` -* `--hw_name=value` → `CM_HW_NAME=value` -* `--hw_notes_extra=value` → `CM_MLPERF_SUT_HW_NOTES_EXTRA=value` -* `--infer_scenario_results=value` → `CM_MLPERF_DUPLICATE_SCENARIO_RESULTS=value` -* `--power_settings_file=value` → `CM_MLPERF_POWER_SETTINGS_FILE_PATH=value` -* `--preprocess=value` → `CM_RUN_MLPERF_SUBMISSION_PREPROCESSOR=value` -* `--preprocess_submission=value` → `CM_RUN_MLPERF_SUBMISSION_PREPROCESSOR=value` -* `--results_dir=value` → `CM_MLPERF_INFERENCE_RESULTS_DIR_=value` -* `--run_checker=value` → `CM_RUN_SUBMISSION_CHECKER=value` -* `--run_style=value` → `CM_MLPERF_RUN_STYLE=value` -* `--skip_truncation=value` → `CM_SKIP_TRUNCATE_ACCURACY=value` -* `--submission_dir=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` -* `--submitter=value` → `CM_MLPERF_SUBMITTER=value` -* `--sw_notes_extra=value` → `CM_MLPERF_SUT_SW_NOTES_EXTRA=value` -* `--tar=value` → `CM_TAR_SUBMISSION_DIR=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "analyzer_settings_file":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_RUN_MLPERF_ACCURACY: `on` -* CM_MLPERF_RUN_STYLE: `valid` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-submission/_cm.json)*** - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * mlcommons,inference,src - * CM names: `--adr.['inference-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * get,sut,system-description - - CM script: [get-mlperf-inference-sut-description](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-sut-description) - * install,pip-package,for-cmind-python,_package.tabulate - - CM script: [install-pip-package-for-cmind-python](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pip-package-for-cmind-python) - * get,mlperf,inference,utils - - CM script: [get-mlperf-inference-utils](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-utils) - * get,mlperf,results,dir - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_MLPERF_INFERENCE_RESULTS_DIR_': ['on']}` - * CM names: `--adr.['get-mlperf-results-dir']...` - - CM script: [get-mlperf-inference-results-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results-dir) - * get,mlperf,submission,dir - * Skip this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_INFERENCE_SUBMISSION_DIR': ['on']}` - * CM names: `--adr.['get-mlperf-submission-dir']...` - - CM script: [get-mlperf-inference-submission-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-submission-dir) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-submission/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-submission/_cm.json) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-submission/_cm.json) - 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-submission/customize.py)*** - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-submission/_cm.json)*** - * accuracy,truncate,mlc - * Enable this dependency only if all ENV vars are set:
    -`{'CM_RUN_MLPERF_ACCURACY': ['on']}` - * Skip this dependency only if all ENV vars are set:
    -`{'CM_SKIP_TRUNCATE_ACCURACY': ['yes']}` - - CM script: [truncate-mlperf-inference-accuracy-log](https://github.com/mlcommons/cm4mlops/tree/master/script/truncate-mlperf-inference-accuracy-log) - * preprocess,mlperf,submission - * Enable this dependency only if all ENV vars are set:
    -`{'CM_RUN_MLPERF_SUBMISSION_PREPROCESSOR': ['on', 'True', 'yes', True]}` - - CM script: [preprocess-mlperf-inference-submission](https://github.com/mlcommons/cm4mlops/tree/master/script/preprocess-mlperf-inference-submission) - * submission,inference,checker,mlc - * Enable this dependency only if all ENV vars are set:
    -`{'CM_RUN_SUBMISSION_CHECKER': ['yes']}` - * CM names: `--adr.['mlperf-inference-submission-checker', 'submission-checker']...` - - CM script: [run-mlperf-inference-submission-checker](https://github.com/mlcommons/cm4mlops/tree/master/script/run-mlperf-inference-submission-checker) - -___ -### Script output -`cmr "generate submission mlperf mlperf-inference inference mlcommons inference-submission mlperf-inference-submission mlcommons-inference-submission " [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/generate-mlperf-inference-user-conf/index.md b/docs/MLPerf-benchmark-support/generate-mlperf-inference-user-conf/index.md deleted file mode 100644 index f248f0b50..000000000 --- a/docs/MLPerf-benchmark-support/generate-mlperf-inference-user-conf/index.md +++ /dev/null @@ -1,199 +0,0 @@ -Automatically generated README for this automation recipe: **generate-mlperf-inference-user-conf** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Developers: [Arjun Suresh](https://www.linkedin.com/in/arjunsuresh), [Thomas Zhu](https://www.linkedin.com/in/hanwen-zhu-483614189), [Grigori Fursin](https://cKnowledge.org/gfursin) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=generate-mlperf-inference-user-conf,3af4475745964b93) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-user-conf)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *generate,mlperf,inference,user-conf,inference-user-conf* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "generate mlperf inference user-conf inference-user-conf" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=generate,mlperf,inference,user-conf,inference-user-conf` - -`cm run script --tags=generate,mlperf,inference,user-conf,inference-user-conf [--input_flags]` - -*or* - -`cmr "generate mlperf inference user-conf inference-user-conf"` - -`cmr "generate mlperf inference user-conf inference-user-conf " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'generate,mlperf,inference,user-conf,inference-user-conf', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="generate,mlperf,inference,user-conf,inference-user-conf"``` - -#### Run this script via Docker (beta) - -`cm docker script "generate mlperf inference user-conf inference-user-conf" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--count=value` → `CM_MLPERF_LOADGEN_QUERY_COUNT=value` -* `--hw_name=value` → `CM_HW_NAME=value` -* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` -* `--multistream_target_latency=value` → `CM_MLPERF_LOADGEN_MULTISTREAM_TARGET_LATENCY=value` -* `--num_threads=value` → `CM_NUM_THREADS=value` -* `--offline_target_qps=value` → `CM_MLPERF_LOADGEN_OFFLINE_TARGET_QPS=value` -* `--output_dir=value` → `OUTPUT_BASE_DIR=value` -* `--performance_sample_count=value` → `CM_MLPERF_PERFORMANCE_SAMPLE_COUNT=value` -* `--power=value` → `CM_MLPERF_POWER=value` -* `--regenerate_files=value` → `CM_REGENERATE_MEASURE_FILES=value` -* `--rerun=value` → `CM_RERUN=value` -* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` -* `--server_target_qps=value` → `CM_MLPERF_LOADGEN_SERVER_TARGET_QPS=value` -* `--singlestream_target_latency=value` → `CM_MLPERF_LOADGEN_SINGLESTREAM_TARGET_LATENCY=value` -* `--target_latency=value` → `CM_MLPERF_LOADGEN_TARGET_LATENCY=value` -* `--target_qps=value` → `CM_MLPERF_LOADGEN_TARGET_QPS=value` -* `--test_query_count=value` → `CM_TEST_QUERY_COUNT=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "count":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_MLPERF_LOADGEN_MODE: `accuracy` -* CM_MLPERF_LOADGEN_SCENARIO: `Offline` -* CM_OUTPUT_FOLDER_NAME: `test_results` -* CM_MLPERF_RUN_STYLE: `test` -* CM_TEST_QUERY_COUNT: `10` -* CM_FAST_FACTOR: `5` -* CM_MLPERF_QUANTIZATION: `False` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-user-conf/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,python - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,mlperf,results,dir - * Skip this dependenecy only if all ENV vars are set:
    -`{'OUTPUT_BASE_DIR': ['on']}` - * CM names: `--adr.['get-mlperf-results-dir']...` - - CM script: [get-mlperf-inference-results-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results-dir) - * get,mlcommons,inference,src - * CM names: `--adr.['inference-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * get,sut,configs - - CM script: [get-mlperf-inference-sut-configs](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-sut-configs) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-user-conf/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-user-conf/_cm.yaml) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-user-conf/_cm.yaml) - 1. Run "postprocess" function from customize.py - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-inference-user-conf/_cm.yaml) - -___ -### Script output -`cmr "generate mlperf inference user-conf inference-user-conf " [--input_flags] -j` -#### New environment keys (filter) - -* `CM_HW_*` -* `CM_LOGS_DIR` -* `CM_MAX_EXAMPLES` -* `CM_MLPERF_*` -* `CM_SUT_*` -#### New environment keys auto-detected from customize - -* `CM_LOGS_DIR` -* `CM_MAX_EXAMPLES` -* `CM_MLPERF_ACCURACY_RESULTS_DIR` -* `CM_MLPERF_COMPLIANCE_RUN_POSTPONED` -* `CM_MLPERF_CONF` -* `CM_MLPERF_INFERENCE_AUDIT_PATH` -* `CM_MLPERF_INFERENCE_FINAL_RESULTS_DIR` -* `CM_MLPERF_INFERENCE_MIN_DURATION` -* `CM_MLPERF_LOADGEN_LOGS_DIR` -* `CM_MLPERF_LOADGEN_MODE` -* `CM_MLPERF_LOADGEN_QUERY_COUNT` -* `CM_MLPERF_LOADGEN_SCENARIO` -* `CM_MLPERF_LOADGEN_TARGET_LATENCY` -* `CM_MLPERF_LOADGEN_TARGET_QPS` -* `CM_MLPERF_OUTPUT_DIR` -* `CM_MLPERF_POWER_LOG_DIR` -* `CM_MLPERF_RANGING_USER_CONF` -* `CM_MLPERF_RUN_STYLE` -* `CM_MLPERF_SKIP_RUN` -* `CM_MLPERF_TESTING_USER_CONF` -* `CM_MLPERF_USER_CONF` -* `CM_MLPERF_USE_MAX_DURATION` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/generate-mlperf-tiny-report/index.md b/docs/MLPerf-benchmark-support/generate-mlperf-tiny-report/index.md deleted file mode 100644 index c2df0a411..000000000 --- a/docs/MLPerf-benchmark-support/generate-mlperf-tiny-report/index.md +++ /dev/null @@ -1,145 +0,0 @@ -Automatically generated README for this automation recipe: **generate-mlperf-tiny-report** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Developers: [Grigori Fursin](https://cKnowledge.org/gfursin) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=generate-mlperf-tiny-report,709c3f3f9b3e4783) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *generate,mlperf,tiny,mlperf-tiny,report* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "generate mlperf tiny mlperf-tiny report" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=generate,mlperf,tiny,mlperf-tiny,report` - -`cm run script --tags=generate,mlperf,tiny,mlperf-tiny,report [--input_flags]` - -*or* - -`cmr "generate mlperf tiny mlperf-tiny report"` - -`cmr "generate mlperf tiny mlperf-tiny report " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'generate,mlperf,tiny,mlperf-tiny,report', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="generate,mlperf,tiny,mlperf-tiny,report"``` - -#### Run this script via Docker (beta) - -`cm docker script "generate mlperf tiny mlperf-tiny report" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--repo_tags=value` → `CM_IMPORT_TINYMLPERF_REPO_TAGS=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "repo_tags":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_IMPORT_TINYMLPERF_REPO_TAGS: `1.1-private` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,generic-python-lib,_xlsxwriter - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_pandas - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report/_cm.yaml) - 1. ***Run native script if exists*** - * [run_submission_checker.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report/run_submission_checker.bat) - * [run_submission_checker.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report/run_submission_checker.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-report/_cm.yaml) - -___ -### Script output -`cmr "generate mlperf tiny mlperf-tiny report " [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/generate-mlperf-tiny-submission/index.md b/docs/MLPerf-benchmark-support/generate-mlperf-tiny-submission/index.md deleted file mode 100644 index 8e6de9427..000000000 --- a/docs/MLPerf-benchmark-support/generate-mlperf-tiny-submission/index.md +++ /dev/null @@ -1,414 +0,0 @@ -Automatically generated README for this automation recipe: **generate-mlperf-tiny-submission** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=generate-mlperf-tiny-submission,04289b9fc07b42b6) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-submission)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *generate,submission,mlperf,mlperf-tiny,tiny,mlcommons,tiny-submission,mlperf-tiny-submission,mlcommons-tiny-submission* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "generate submission mlperf mlperf-tiny tiny mlcommons tiny-submission mlperf-tiny-submission mlcommons-tiny-submission" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=generate,submission,mlperf,mlperf-tiny,tiny,mlcommons,tiny-submission,mlperf-tiny-submission,mlcommons-tiny-submission` - -`cm run script --tags=generate,submission,mlperf,mlperf-tiny,tiny,mlcommons,tiny-submission,mlperf-tiny-submission,mlcommons-tiny-submission ` - -*or* - -`cmr "generate submission mlperf mlperf-tiny tiny mlcommons tiny-submission mlperf-tiny-submission mlcommons-tiny-submission"` - -`cmr "generate submission mlperf mlperf-tiny tiny mlcommons tiny-submission mlperf-tiny-submission mlcommons-tiny-submission " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'generate,submission,mlperf,mlperf-tiny,tiny,mlcommons,tiny-submission,mlperf-tiny-submission,mlcommons-tiny-submission' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="generate,submission,mlperf,mlperf-tiny,tiny,mlcommons,tiny-submission,mlperf-tiny-submission,mlcommons-tiny-submission"``` - -#### Run this script via Docker (beta) - -`cm docker script "generate submission mlperf mlperf-tiny tiny mlcommons tiny-submission mlperf-tiny-submission mlcommons-tiny-submission" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-submission/_cm.json)*** - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,sut,system-description - - CM script: [get-mlperf-inference-sut-description](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-sut-description) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-submission/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-submission/_cm.json) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-submission/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-submission/customize.py)*** - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-mlperf-tiny-submission/_cm.json)*** - * - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_RUN_STYLE': ['valid']}` - - CM script: [outdated-test-abtf-ssd-pytorch](outdated-test-abtf-ssd-pytorch) - - CM script: [run-how-to-run-server](https://github.com/how-to-run/server/tree/master/script/run-how-to-run-server) - - CM script: [get-dataset-cognata-mlcommons](https://github.com/mlcommons/cm4abtf/tree/master/script/get-dataset-cognata-mlcommons) - - CM script: [get-ml-model-abtf-ssd-pytorch](https://github.com/mlcommons/cm4abtf/tree/master/script/get-ml-model-abtf-ssd-pytorch) - - CM script: [test-ssd-resnet50-cognata-pytorch](https://github.com/mlcommons/cm4abtf/tree/master/script/test-ssd-resnet50-cognata-pytorch) - - CM script: [reproduce-ieee-acm-micro2023-paper-22](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-22) - - CM script: [reproduce-ieee-acm-micro2023-paper-28](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-28) - - CM script: [reproduce-ieee-acm-micro2023-paper-33](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-33) - - CM script: [reproduce-ieee-acm-micro2023-paper-38](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-38) - - CM script: [reproduce-ieee-acm-micro2023-paper-5](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-5) - - CM script: [reproduce-ieee-acm-micro2023-paper-8](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-8) - - CM script: [reproduce-ieee-acm-micro2023-paper-85](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-85) - - CM script: [reproduce-ieee-acm-micro2023-paper-87](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-87) - - CM script: 
[reproduce-ieee-acm-micro2023-paper-96](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ieee-acm-micro2023-paper-96) - - CM script: [reproduce-ipol-paper-2022-439a](https://github.com/ctuning/cm4research/tree/master/script/reproduce-ipol-paper-2022-439a) - - CM script: [reproduce-neurips-paper-2022-arxiv-2204.09656](https://github.com/ctuning/cm4research/tree/master/script/reproduce-neurips-paper-2022-arxiv-2204.09656) - - CM script: [process-mlperf-inference-results](https://github.com/mlcommons/cm4mlperf-results/tree/master/script/process-mlperf-inference-results) - - CM script: [get-target-device](https://github.com/cknowledge/cm4research/tree/master/script/get-target-device) - - CM script: [run-refiners-hello-world](https://github.com/cknowledge/cm4research/tree/master/script/run-refiners-hello-world) - - CM script: [test-mlperf-loadgen-cpp](https://github.com/cknowledge/cm4research/tree/master/script/test-mlperf-loadgen-cpp) - - CM script: [test-mlperf-loadgen-py](https://github.com/cknowledge/cm4research/tree/master/script/test-mlperf-loadgen-py) - - CM script: [test-onnxruntime-cpp](https://github.com/cknowledge/cm4research/tree/master/script/test-onnxruntime-cpp) - - CM script: [app-generate-image-dalle-mini-jax-py](https://github.com/cknowledge/cm4research-private/tree/master/script/app-generate-image-dalle-mini-jax-py) - - CM script: [app-generate-image-stable-diffusion2-pytorch-cuda-py](https://github.com/cknowledge/cm4research-private/tree/master/script/app-generate-image-stable-diffusion2-pytorch-cuda-py) - - CM script: [app-image-classification-onnx-py-ck](https://github.com/cknowledge/cm4research-private/tree/master/script/app-image-classification-onnx-py-ck) - - CM script: [app-image-corner-detection-old](https://github.com/cknowledge/cm4research-private/tree/master/script/app-image-corner-detection-old) - - CM script: 
[app-mlperf-inference-nvidia](https://github.com/cknowledge/cm4research-private/tree/master/script/app-mlperf-inference-nvidia) - - CM script: [app-stable-diffusion-pytorch-cuda-py](https://github.com/cknowledge/cm4research-private/tree/master/script/app-stable-diffusion-pytorch-cuda-py) - - CM script: [get-axs](https://github.com/cknowledge/cm4research-private/tree/master/script/get-axs) - - CM script: [gui-llm](https://github.com/cknowledge/cm4research-private/tree/master/script/gui-llm) - - CM script: [not-needed--get-android-cmdline-tools](https://github.com/cknowledge/cm4research-private/tree/master/script/not-needed--get-android-cmdline-tools) - - CM script: [not-needed--install-android-cmdline-tools](https://github.com/cknowledge/cm4research-private/tree/master/script/not-needed--install-android-cmdline-tools) - - CM script: [test-script1](https://github.com/cknowledge/cm4research-private/tree/master/script/test-script1) - - CM script: [test-script2](https://github.com/cknowledge/cm4research-private/tree/master/script/test-script2) - - CM script: [test-script3](https://github.com/cknowledge/cm4research-private/tree/master/script/test-script3) - - CM script: [test-script4](https://github.com/cknowledge/cm4research-private/tree/master/script/test-script4) - - CM script: [test-script5](https://github.com/cknowledge/cm4research-private/tree/master/script/test-script5) - - CM script: [activate-python-venv](https://github.com/mlcommons/cm4mlops/tree/master/script/activate-python-venv) - - CM script: [add-custom-nvidia-system](https://github.com/mlcommons/cm4mlops/tree/master/script/add-custom-nvidia-system) - - CM script: [app-image-classification-onnx-py](https://github.com/mlcommons/cm4mlops/tree/master/script/app-image-classification-onnx-py) - - CM script: [app-image-classification-tf-onnx-cpp](https://github.com/mlcommons/cm4mlops/tree/master/script/app-image-classification-tf-onnx-cpp) - - CM script: 
[app-image-classification-torch-py](https://github.com/mlcommons/cm4mlops/tree/master/script/app-image-classification-torch-py) - - CM script: [app-image-classification-tvm-onnx-py](https://github.com/mlcommons/cm4mlops/tree/master/script/app-image-classification-tvm-onnx-py) - - CM script: [app-image-corner-detection](https://github.com/mlcommons/cm4mlops/tree/master/script/app-image-corner-detection) - - CM script: [app-loadgen-generic-python](https://github.com/mlcommons/cm4mlops/tree/master/script/app-loadgen-generic-python) - - CM script: [app-mlperf-inference](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference) - - CM script: [app-mlperf-inference-ctuning-cpp-tflite](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-ctuning-cpp-tflite) - - CM script: [app-mlperf-inference-dummy](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-dummy) - - CM script: [app-mlperf-inference-intel](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-intel) - - CM script: [app-mlperf-inference-mlcommons-cpp](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-mlcommons-cpp) - - CM script: [app-mlperf-inference-mlcommons-python](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-mlcommons-python) - - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) - - CM script: [app-mlperf-inference-qualcomm](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-qualcomm) - - CM script: [app-mlperf-training-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-training-nvidia) - - CM script: [app-mlperf-training-reference](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-training-reference) - - CM script: 
[app-stable-diffusion-onnx-py](https://github.com/mlcommons/cm4mlops/tree/master/script/app-stable-diffusion-onnx-py) - - CM script: [benchmark-any-mlperf-inference-implementation](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-any-mlperf-inference-implementation) - - CM script: [benchmark-program](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program) - - CM script: [benchmark-program-mlperf](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program-mlperf) - - CM script: [build-docker-image](https://github.com/mlcommons/cm4mlops/tree/master/script/build-docker-image) - - CM script: [build-dockerfile](https://github.com/mlcommons/cm4mlops/tree/master/script/build-dockerfile) - - CM script: [build-mlperf-inference-server-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/build-mlperf-inference-server-nvidia) - - CM script: [calibrate-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/calibrate-model-for.qaic) - - CM script: [compile-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-model-for.qaic) - - CM script: [compile-program](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-program) - - CM script: [convert-csv-to-md](https://github.com/mlcommons/cm4mlops/tree/master/script/convert-csv-to-md) - - CM script: [convert-ml-model-huggingface-to-onnx](https://github.com/mlcommons/cm4mlops/tree/master/script/convert-ml-model-huggingface-to-onnx) - - CM script: [copy-to-clipboard](https://github.com/mlcommons/cm4mlops/tree/master/script/copy-to-clipboard) - - CM script: [create-conda-env](https://github.com/mlcommons/cm4mlops/tree/master/script/create-conda-env) - - CM script: [create-fpgaconvnet-app-tinyml](https://github.com/mlcommons/cm4mlops/tree/master/script/create-fpgaconvnet-app-tinyml) - - CM script: 
[create-fpgaconvnet-config-tinyml](https://github.com/mlcommons/cm4mlops/tree/master/script/create-fpgaconvnet-config-tinyml) - - CM script: [create-patch](https://github.com/mlcommons/cm4mlops/tree/master/script/create-patch) - - CM script: [destroy-terraform](https://github.com/mlcommons/cm4mlops/tree/master/script/destroy-terraform) - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - - CM script: [detect-sudo](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-sudo) - - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) - - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) - - CM script: [download-torrent](https://github.com/mlcommons/cm4mlops/tree/master/script/download-torrent) - - CM script: [dump-pip-freeze](https://github.com/mlcommons/cm4mlops/tree/master/script/dump-pip-freeze) - - CM script: [extract-file](https://github.com/mlcommons/cm4mlops/tree/master/script/extract-file) - - CM script: [fail](https://github.com/mlcommons/cm4mlops/tree/master/script/fail) - - CM script: [flash-tinyml-binary](https://github.com/mlcommons/cm4mlops/tree/master/script/flash-tinyml-binary) - - CM script: [generate-mlperf-inference-submission](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-submission) - - CM script: [generate-mlperf-inference-user-conf](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-user-conf) - - CM script: [generate-mlperf-tiny-report](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-tiny-report) - - CM script: [generate-mlperf-tiny-submission](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-tiny-submission) - - CM script: 
[generate-nvidia-engine](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-nvidia-engine) - - CM script: [get-android-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-android-sdk) - - CM script: [get-aocl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-aocl) - - CM script: [get-aria2](https://github.com/mlcommons/cm4mlops/tree/master/script/get-aria2) - - CM script: [get-aws-cli](https://github.com/mlcommons/cm4mlops/tree/master/script/get-aws-cli) - - CM script: [get-bazel](https://github.com/mlcommons/cm4mlops/tree/master/script/get-bazel) - - CM script: [get-bert-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-bert-squad-vocab) - - CM script: [get-blis](https://github.com/mlcommons/cm4mlops/tree/master/script/get-blis) - - CM script: [get-brew](https://github.com/mlcommons/cm4mlops/tree/master/script/get-brew) - - CM script: [get-ck](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ck) - - CM script: [get-ck-repo-mlops](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ck-repo-mlops) - - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) - - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) - - CM script: [get-cmsis_5](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmsis_5) - - CM script: [get-compiler-flags](https://github.com/mlcommons/cm4mlops/tree/master/script/get-compiler-flags) - - CM script: [get-compiler-rust](https://github.com/mlcommons/cm4mlops/tree/master/script/get-compiler-rust) - - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) - - CM script: [get-croissant](https://github.com/mlcommons/cm4mlops/tree/master/script/get-croissant) - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - - CM script: 
[get-cuda-devices](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda-devices) - - CM script: [get-cudnn](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cudnn) - - CM script: [get-dataset-cifar10](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-cifar10) - - CM script: [get-dataset-cnndm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-cnndm) - - CM script: [get-dataset-coco](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-coco) - - CM script: [get-dataset-coco2014](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-coco2014) - - CM script: [get-dataset-criteo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-criteo) - - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) - - CM script: [get-dataset-imagenet-calibration](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-calibration) - - CM script: [get-dataset-imagenet-helper](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-helper) - - CM script: [get-dataset-imagenet-train](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-train) - - CM script: [get-dataset-imagenet-val](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-val) - - CM script: [get-dataset-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-kits19) - - CM script: [get-dataset-librispeech](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-librispeech) - - CM script: [get-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages) - - CM script: [get-dataset-openimages-annotations](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages-annotations) - - CM script: 
[get-dataset-openimages-calibration](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages-calibration) - - CM script: [get-dataset-openorca](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openorca) - - CM script: [get-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad) - - CM script: [get-dataset-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad-vocab) - - CM script: [get-dlrm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dlrm) - - CM script: [get-dlrm-data-mlperf-inference](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dlrm-data-mlperf-inference) - - CM script: [get-docker](https://github.com/mlcommons/cm4mlops/tree/master/script/get-docker) - - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - - CM script: [get-github-cli](https://github.com/mlcommons/cm4mlops/tree/master/script/get-github-cli) - - CM script: [get-go](https://github.com/mlcommons/cm4mlops/tree/master/script/get-go) - - CM script: [get-google-saxml](https://github.com/mlcommons/cm4mlops/tree/master/script/get-google-saxml) - - CM script: [get-google-test](https://github.com/mlcommons/cm4mlops/tree/master/script/get-google-test) - - CM script: [get-ipol-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ipol-src) - - CM script: [get-java](https://github.com/mlcommons/cm4mlops/tree/master/script/get-java) - - CM script: [get-javac](https://github.com/mlcommons/cm4mlops/tree/master/script/get-javac) - - CM script: 
[get-lib-armnn](https://github.com/mlcommons/cm4mlops/tree/master/script/get-lib-armnn) - - CM script: [get-lib-dnnl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-lib-dnnl) - - CM script: [get-lib-protobuf](https://github.com/mlcommons/cm4mlops/tree/master/script/get-lib-protobuf) - - CM script: [get-lib-qaic-api](https://github.com/mlcommons/cm4mlops/tree/master/script/get-lib-qaic-api) - - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) - - CM script: [get-microtvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-microtvm) - - CM script: [get-ml-model-3d-unet-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-3d-unet-kits19) - - CM script: [get-ml-model-bert-base-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-base-squad) - - CM script: [get-ml-model-bert-large-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-large-squad) - - CM script: [get-ml-model-dlrm-terabyte](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-dlrm-terabyte) - - CM script: [get-ml-model-efficientnet-lite](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-efficientnet-lite) - - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) - - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo) - - CM script: [get-ml-model-llama2](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-llama2) - - CM script: [get-ml-model-mobilenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-mobilenet) - - CM script: [get-ml-model-neuralmagic-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-neuralmagic-zoo) - - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) - 
- CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) - - CM script: [get-ml-model-retinanet-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet-nvidia) - - CM script: [get-ml-model-rnnt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-rnnt) - - CM script: [get-ml-model-stable-diffusion](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-stable-diffusion) - - CM script: [get-ml-model-tiny-resnet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-tiny-resnet) - - CM script: [get-ml-model-using-imagenet-from-model-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-using-imagenet-from-model-zoo) - - CM script: [get-mlperf-inference-intel-scratch-space](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-intel-scratch-space) - - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) - - CM script: [get-mlperf-inference-nvidia-common-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-common-code) - - CM script: [get-mlperf-inference-nvidia-scratch-space](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-scratch-space) - - CM script: [get-mlperf-inference-results](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results) - - CM script: [get-mlperf-inference-results-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results-dir) - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - - CM script: [get-mlperf-inference-submission-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-submission-dir) - - CM script: 
[get-mlperf-inference-sut-configs](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-sut-configs) - - CM script: [get-mlperf-inference-sut-description](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-sut-description) - - CM script: [get-mlperf-inference-utils](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-utils) - - CM script: [get-mlperf-logging](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-logging) - - CM script: [get-mlperf-power-dev](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-power-dev) - - CM script: [get-mlperf-tiny-eembc-energy-runner-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-tiny-eembc-energy-runner-src) - - CM script: [get-mlperf-tiny-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-tiny-src) - - CM script: [get-mlperf-training-nvidia-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-nvidia-code) - - CM script: [get-mlperf-training-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-src) - - CM script: [get-nvidia-docker](https://github.com/mlcommons/cm4mlops/tree/master/script/get-nvidia-docker) - - CM script: [get-nvidia-mitten](https://github.com/mlcommons/cm4mlops/tree/master/script/get-nvidia-mitten) - - CM script: [get-onnxruntime-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-onnxruntime-prebuilt) - - CM script: [get-openssl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-openssl) - - CM script: [get-preprocessed-dataset-criteo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-criteo) - - CM script: [get-preprocesser-script-generic](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocesser-script-generic) - - CM script: 
[get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) - - CM script: [get-preprocessed-dataset-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-kits19) - - CM script: [get-preprocessed-dataset-librispeech](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-librispeech) - - CM script: [get-preprocessed-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openimages) - - CM script: [get-preprocessed-dataset-openorca](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openorca) - - CM script: [get-preprocessed-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-squad) - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - - CM script: [get-qaic-apps-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-qaic-apps-sdk) - - CM script: [get-qaic-platform-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-qaic-platform-sdk) - - CM script: [get-qaic-software-kit](https://github.com/mlcommons/cm4mlops/tree/master/script/get-qaic-software-kit) - - CM script: [get-rclone](https://github.com/mlcommons/cm4mlops/tree/master/script/get-rclone) - - CM script: [get-rocm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-rocm) - - CM script: [get-spec-ptd](https://github.com/mlcommons/cm4mlops/tree/master/script/get-spec-ptd) - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - - CM script: [get-sys-utils-min](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-min) - - CM script: [get-tensorrt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tensorrt) - - CM script: 
[get-terraform](https://github.com/mlcommons/cm4mlops/tree/master/script/get-terraform) - - CM script: [get-tvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm) - - CM script: [get-tvm-model](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm-model) - - CM script: [get-xilinx-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-xilinx-sdk) - - CM script: [get-zendnn](https://github.com/mlcommons/cm4mlops/tree/master/script/get-zendnn) - - CM script: [get-zephyr](https://github.com/mlcommons/cm4mlops/tree/master/script/get-zephyr) - - CM script: [get-zephyr-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-zephyr-sdk) - - CM script: [gui](https://github.com/mlcommons/cm4mlops/tree/master/script/gui) - - CM script: [import-mlperf-inference-to-experiment](https://github.com/mlcommons/cm4mlops/tree/master/script/import-mlperf-inference-to-experiment) - - CM script: [import-mlperf-tiny-to-experiment](https://github.com/mlcommons/cm4mlops/tree/master/script/import-mlperf-tiny-to-experiment) - - CM script: [import-mlperf-training-to-experiment](https://github.com/mlcommons/cm4mlops/tree/master/script/import-mlperf-training-to-experiment) - - CM script: [install-aws-cli](https://github.com/mlcommons/cm4mlops/tree/master/script/install-aws-cli) - - CM script: [install-bazel](https://github.com/mlcommons/cm4mlops/tree/master/script/install-bazel) - - CM script: [install-cmake-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/install-cmake-prebuilt) - - CM script: [install-cuda-package-manager](https://github.com/mlcommons/cm4mlops/tree/master/script/install-cuda-package-manager) - - CM script: [install-cuda-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/install-cuda-prebuilt) - - CM script: [install-gcc-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-gcc-src) - - CM script: 
[install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - - CM script: [install-gflags](https://github.com/mlcommons/cm4mlops/tree/master/script/install-gflags) - - CM script: [install-github-cli](https://github.com/mlcommons/cm4mlops/tree/master/script/install-github-cli) - - CM script: [install-ipex-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-ipex-from-src) - - CM script: [install-llvm-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-prebuilt) - - CM script: [install-llvm-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-src) - - CM script: [install-mlperf-logging-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-mlperf-logging-from-src) - - CM script: [install-nccl-libs](https://github.com/mlcommons/cm4mlops/tree/master/script/install-nccl-libs) - - CM script: [install-numactl-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-numactl-from-src) - - CM script: [install-onednn-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-onednn-from-src) - - CM script: [install-onnxruntime-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-onnxruntime-from-src) - - CM script: [install-openssl](https://github.com/mlcommons/cm4mlops/tree/master/script/install-openssl) - - CM script: [install-pip-package-for-cmind-python](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pip-package-for-cmind-python) - - CM script: [install-python-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-python-src) - - CM script: [install-python-venv](https://github.com/mlcommons/cm4mlops/tree/master/script/install-python-venv) - - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src) - - CM script: 
[install-pytorch-kineto-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-kineto-from-src) - - CM script: [install-qaic-compute-sdk-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-qaic-compute-sdk-from-src) - - CM script: [install-rocm](https://github.com/mlcommons/cm4mlops/tree/master/script/install-rocm) - - CM script: [install-tensorflow-for-c](https://github.com/mlcommons/cm4mlops/tree/master/script/install-tensorflow-for-c) - - CM script: [install-tensorflow-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-tensorflow-from-src) - - CM script: [install-terraform-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-terraform-from-src) - - CM script: [install-tflite-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-tflite-from-src) - - CM script: [install-torchvision-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-torchvision-from-src) - - CM script: [install-tpp-pytorch-extension](https://github.com/mlcommons/cm4mlops/tree/master/script/install-tpp-pytorch-extension) - - CM script: [install-transformers-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-transformers-from-src) - - CM script: [launch-benchmark](https://github.com/mlcommons/cm4mlops/tree/master/script/launch-benchmark) - - CM script: [prepare-training-data-bert](https://github.com/mlcommons/cm4mlops/tree/master/script/prepare-training-data-bert) - - CM script: [prepare-training-data-resnet](https://github.com/mlcommons/cm4mlops/tree/master/script/prepare-training-data-resnet) - - CM script: [preprocess-mlperf-inference-submission](https://github.com/mlcommons/cm4mlops/tree/master/script/preprocess-mlperf-inference-submission) - - CM script: [print-croissant-desc](https://github.com/mlcommons/cm4mlops/tree/master/script/print-croissant-desc) - - CM script: 
[print-hello-world](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world) - - CM script: [print-hello-world-java](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world-java) - - CM script: [print-hello-world-javac](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world-javac) - - CM script: [print-hello-world-py](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world-py) - - CM script: [print-python-version](https://github.com/mlcommons/cm4mlops/tree/master/script/print-python-version) - - CM script: [process-ae-users](https://github.com/mlcommons/cm4mlops/tree/master/script/process-ae-users) - - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) - - CM script: [prune-bert-models](https://github.com/mlcommons/cm4mlops/tree/master/script/prune-bert-models) - - CM script: [prune-docker](https://github.com/mlcommons/cm4mlops/tree/master/script/prune-docker) - - CM script: [publish-results-to-dashboard](https://github.com/mlcommons/cm4mlops/tree/master/script/publish-results-to-dashboard) - - CM script: [pull-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/pull-git-repo) - - CM script: [push-csv-to-spreadsheet](https://github.com/mlcommons/cm4mlops/tree/master/script/push-csv-to-spreadsheet) - - CM script: [push-mlperf-inference-results-to-github](https://github.com/mlcommons/cm4mlops/tree/master/script/push-mlperf-inference-results-to-github) - - CM script: [remote-run-commands](https://github.com/mlcommons/cm4mlops/tree/master/script/remote-run-commands) - - CM script: [reproduce-ipol-paper-2022-439](https://github.com/mlcommons/cm4mlops/tree/master/script/reproduce-ipol-paper-2022-439) - - CM script: [reproduce-mlperf-octoml-tinyml-results](https://github.com/mlcommons/cm4mlops/tree/master/script/reproduce-mlperf-octoml-tinyml-results) - - CM script: 
[reproduce-mlperf-training-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/reproduce-mlperf-training-nvidia) - - CM script: [run-docker-container](https://github.com/mlcommons/cm4mlops/tree/master/script/run-docker-container) - - CM script: [run-mlperf-inference-app](https://github.com/mlcommons/cm4mlops/tree/master/script/run-mlperf-inference-app) - - CM script: [run-mlperf-inference-mobilenet-models](https://github.com/mlcommons/cm4mlops/tree/master/script/run-mlperf-inference-mobilenet-models) - - CM script: [run-mlperf-inference-submission-checker](https://github.com/mlcommons/cm4mlops/tree/master/script/run-mlperf-inference-submission-checker) - - CM script: [run-mlperf-power-client](https://github.com/mlcommons/cm4mlops/tree/master/script/run-mlperf-power-client) - - CM script: [run-mlperf-power-server](https://github.com/mlcommons/cm4mlops/tree/master/script/run-mlperf-power-server) - - CM script: [run-mlperf-training-submission-checker](https://github.com/mlcommons/cm4mlops/tree/master/script/run-mlperf-training-submission-checker) - - CM script: [run-python](https://github.com/mlcommons/cm4mlops/tree/master/script/run-python) - - CM script: [run-terraform](https://github.com/mlcommons/cm4mlops/tree/master/script/run-terraform) - - CM script: [save-mlperf-inference-implementation-state](https://github.com/mlcommons/cm4mlops/tree/master/script/save-mlperf-inference-implementation-state) - - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) - - CM script: [set-echo-off-win](https://github.com/mlcommons/cm4mlops/tree/master/script/set-echo-off-win) - - CM script: [set-performance-mode](https://github.com/mlcommons/cm4mlops/tree/master/script/set-performance-mode) - - CM script: [set-sqlite-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/set-sqlite-dir) - - CM script: [set-venv](https://github.com/mlcommons/cm4mlops/tree/master/script/set-venv) - - CM script: 
[tar-my-folder](https://github.com/mlcommons/cm4mlops/tree/master/script/tar-my-folder) - - CM script: [test-deps-conditions](https://github.com/mlcommons/cm4mlops/tree/master/script/test-deps-conditions) - - CM script: [test-download-and-extract-artifacts](https://github.com/mlcommons/cm4mlops/tree/master/script/test-download-and-extract-artifacts) - - CM script: [test-mlperf-inference-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/test-mlperf-inference-retinanet) - - CM script: [test-set-sys-user-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/test-set-sys-user-cm) - - CM script: [truncate-mlperf-inference-accuracy-log](https://github.com/mlcommons/cm4mlops/tree/master/script/truncate-mlperf-inference-accuracy-log) - - CM script: [upgrade-python-pip](https://github.com/mlcommons/cm4mlops/tree/master/script/upgrade-python-pip) - - CM script: [wrapper-reproduce-octoml-tinyml-submission](https://github.com/mlcommons/cm4mlops/tree/master/script/wrapper-reproduce-octoml-tinyml-submission) - -___ -### Script output -`cmr "generate submission mlperf mlperf-tiny tiny mlcommons tiny-submission mlperf-tiny-submission mlcommons-tiny-submission " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/generate-nvidia-engine/index.md b/docs/MLPerf-benchmark-support/generate-nvidia-engine/index.md deleted file mode 100644 index ca8540fe6..000000000 --- a/docs/MLPerf-benchmark-support/generate-nvidia-engine/index.md +++ /dev/null @@ -1,244 +0,0 @@ -Automatically generated README for this automation recipe: **generate-nvidia-engine** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=generate-nvidia-engine,0eef9f05b272401f) ]* - ---- - -This CM script is in draft stage - - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-nvidia-engine)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *generate,engine,mlperf,inference,nvidia* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "generate engine mlperf inference nvidia" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=generate,engine,mlperf,inference,nvidia` - -`cm run script --tags=generate,engine,mlperf,inference,nvidia[,variations] [--input_flags]` - -*or* - -`cmr "generate engine mlperf inference nvidia"` - -`cmr "generate engine mlperf inference nvidia [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'generate,engine,mlperf,inference,nvidia' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="generate,engine,mlperf,inference,nvidia"``` - -#### Run this script via Docker (beta) - -`cm docker script "generate engine mlperf inference nvidia[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_batch_size.#` - - Environment variables: - - *CM_MODEL_BATCH_SIZE*: `None` - - Workflow: - * `_copy_streams.#` - - Environment variables: - - *CM_GPU_COPY_STREAMS*: `None` - - Workflow: - * `_cuda` - - Environment variables: - - *CM_MLPERF_DEVICE*: `gpu` - - *CM_MLPERF_DEVICE_LIB_NAMESPEC*: `cudart` - - Workflow: - -
    - - - * Group "**device**" -
    - Click here to expand this section. - - * **`_cpu`** (default) - - Environment variables: - - *CM_MLPERF_DEVICE*: `cpu` - - Workflow: - -
    - - - * Group "**model**" -
    - Click here to expand this section. - - * **`_resnet50`** (default) - - Environment variables: - - *CM_MODEL*: `resnet50` - - Workflow: - * `_retinanet` - - Environment variables: - - *CM_MODEL*: `retinanet` - - Workflow: - -
    - - -#### Default variations - -`_cpu,_resnet50` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--output_dir=value` → `CM_MLPERF_OUTPUT_DIR=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "output_dir":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_BATCH_COUNT: `1` -* CM_BATCH_SIZE: `1` -* CM_LOADGEN_SCENARIO: `Offline` -* CM_GPU_COPY_STREAMS: `1` -* CM_TENSORRT_WORKSPACE_SIZE: `4194304` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-nvidia-engine/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,cuda,_cudnn - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * get,tensorrt - - CM script: [get-tensorrt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tensorrt) - * get,generic-python-lib,_numpy - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_pycuda - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,loadgen - * CM names: `--adr.['loadgen']...` - - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) - * get,mlcommons,inference,src - * CM names: `--adr.['inference-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * get,nvidia,mlperf,inference,common-code - * CM names: `--adr.['nvidia-inference-common-code']...` - - CM script: [get-mlperf-inference-nvidia-common-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-common-code) - * get,dataset,preprocessed,imagenet,_NCHW - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['resnet50']}` - * CM names: `--adr.['imagenet-preprocessed']...` - - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) - * get,ml-model,resnet50,_onnx - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['resnet50']}` - * CM names: `--adr.['ml-model', 'resnet50-model']...` - - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) - * get,dataset,preprocessed,openimages,_validation,_NCHW - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['retinanet']}` - * CM names: `--adr.['openimages-preprocessed']...` - - CM script: [get-preprocessed-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openimages) - * get,ml-model,retinanet,_onnx,_fp32 - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['retinanet']}` - * CM names: `--adr.['ml-model', 'retinanet-model']...` - - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-nvidia-engine/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-nvidia-engine/_cm.yaml) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-nvidia-engine/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-nvidia-engine/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-nvidia-engine/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/generate-nvidia-engine/_cm.yaml) - -___ -### Script output -`cmr "generate engine mlperf inference nvidia [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_DATASET_*` -* `CM_MLPERF_*` -#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-intel-scratch-space/index.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-intel-scratch-space/index.md deleted file mode 100644 index 64717e3ce..000000000 --- a/docs/MLPerf-benchmark-support/get-mlperf-inference-intel-scratch-space/index.md +++ /dev/null @@ -1,161 +0,0 @@ -Automatically generated README for this automation recipe: **get-mlperf-inference-intel-scratch-space** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info 
and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-intel-scratch-space,e83fca30851f45ef) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,mlperf,inference,intel,scratch,space* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get mlperf inference intel scratch space" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,mlperf,inference,intel,scratch,space` - -`cm run script --tags=get,mlperf,inference,intel,scratch,space[,variations] [--input_flags]` - -*or* - -`cmr "get mlperf inference intel scratch space"` - -`cmr "get mlperf inference intel scratch space [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,mlperf,inference,intel,scratch,space' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,mlperf,inference,intel,scratch,space"``` - -#### Run this script via Docker (beta) - -`cm docker script "get mlperf inference intel scratch space[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * Group "**version**" -
    - Click here to expand this section. - - * `_version.#` - - Environment variables: - - *CM_INTEL_SCRATCH_SPACE_VERSION*: `#` - - Workflow: - * **`_version.4_0`** (default) - - Environment variables: - - *CM_INTEL_SCRATCH_SPACE_VERSION*: `4_0` - - Workflow: - -
    - - -#### Default variations - -`_version.4_0` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--scratch_path=value` → `MLPERF_INTEL_SCRATCH_PATH=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "scratch_path":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-intel-scratch-space/_cm.json) - -___ -### Script output -`cmr "get mlperf inference intel scratch space [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_INTEL_MLPERF_SCRATCH_PATH` -* `CM_INTEL_SCRATCH_SPACE_VERSION` -#### New environment keys auto-detected from customize - -* `CM_INTEL_MLPERF_SCRATCH_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-loadgen/index.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-loadgen/index.md deleted file mode 100644 index 979a870b0..000000000 --- a/docs/MLPerf-benchmark-support/get-mlperf-inference-loadgen/index.md +++ /dev/null @@ -1,224 +0,0 @@ -Automatically generated README for this automation recipe: **get-mlperf-inference-loadgen** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-loadgen,64c3d98d0ba04950) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *get,loadgen,inference,inference-loadgen,mlperf,mlcommons* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get loadgen inference inference-loadgen mlperf mlcommons" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,loadgen,inference,inference-loadgen,mlperf,mlcommons` - -`cm run script --tags=get,loadgen,inference,inference-loadgen,mlperf,mlcommons[,variations] ` - -*or* - -`cmr "get loadgen inference inference-loadgen mlperf mlcommons"` - -`cmr "get loadgen inference inference-loadgen mlperf mlcommons [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,loadgen,inference,inference-loadgen,mlperf,mlcommons' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,loadgen,inference,inference-loadgen,mlperf,mlcommons"``` - -#### Run this script via Docker (beta) - -`cm docker script "get loadgen inference inference-loadgen mlperf mlcommons[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_copy` - - Workflow: - * `_custom-python` - - Environment variables: - - *CM_TMP_USE_CUSTOM_PYTHON*: `on` - - Workflow: - * `_download` - - Environment variables: - - *CM_DOWNLOAD_CHECKSUM*: `af3f9525965b2c1acc348fb882a5bfd1` - - *CM_MLPERF_INFERENCE_LOADGEN_DOWNLOAD*: `YES` - - *CM_MLPERF_INFERENCE_LOADGEN_DOWNLOAD_URL*: `https://www.dropbox.com/scl/fi/36dgoiur26i2tvwgsaatf/loadgen.zip?rlkey=ab68i7uza9anvaw0hk1xvf0qk&dl=0` - - *CM_MLPERF_INFERENCE_LOADGEN_VERSION*: `v3.1` - - *CM_VERIFY_SSL*: `False` - - Workflow: - * `_download_v3.1` - - Environment variables: - - *CM_DOWNLOAD_CHECKSUM*: `af3f9525965b2c1acc348fb882a5bfd1` - - *CM_MLPERF_INFERENCE_LOADGEN_DOWNLOAD*: `YES` - - *CM_MLPERF_INFERENCE_LOADGEN_DOWNLOAD_URL*: `https://www.dropbox.com/scl/fi/36dgoiur26i2tvwgsaatf/loadgen.zip?rlkey=ab68i7uza9anvaw0hk1xvf0qk&dl=0` - - *CM_MLPERF_INFERENCE_LOADGEN_VERSION*: `v3.1` - - *CM_VERIFY_SSL*: `False` - - Workflow: - * `_download_v4.0` - - Environment variables: - - *CM_DOWNLOAD_CHECKSUM*: `b4d97525d9ad0539a64667f2a3ca20c5` - - *CM_MLPERF_INFERENCE_LOADGEN_DOWNLOAD*: `YES` - - *CM_MLPERF_INFERENCE_LOADGEN_DOWNLOAD_URL*: `https://www.dropbox.com/scl/fi/gk5e9kziju5t56umxyzyx/loadgen.zip?rlkey=vsie4xnzml1inpjplm5cg7t54&dl=0` - - *CM_MLPERF_INFERENCE_LOADGEN_VERSION*: `v4.0` - - *CM_VERIFY_SSL*: `False` - - Workflow: - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_SHARED_BUILD: `no` - -
    - -#### Versions -Default version: `master` - -* `custom` -* `main` -* `master` -* `pybind_fix` -* `r2.1` -* `r3.0` -* `r3.1` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,python3 - * CM names: `--adr.['python3', 'python']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,mlcommons,inference,src - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_MLPERF_INFERENCE_LOADGEN_DOWNLOAD': ['YES']}` - * CM names: `--adr.['inference-src-loadgen']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * download-and-extract,file,_wget,_extract - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_INFERENCE_LOADGEN_DOWNLOAD': ['YES']}` - * CM names: `--adr.['inference-src-loadgen-download']...` - - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) - * get,compiler - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_HOST_OS_TYPE': ['windows']}` - * CM names: `--adr.['compiler']...` - - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) - - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) - - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) - * get,cl - * Enable this dependency only if all ENV vars are set:
    -`{'CM_HOST_OS_TYPE': ['windows']}` - * CM names: `--adr.['compiler']...` - - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) - * get,cmake - * CM names: `--adr.['cmake']...` - - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) - * get,generic-python-lib,_package.wheel - * CM names: `--adr.['pip-package', 'wheel']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_pip - * CM names: `--adr.['pip-package', 'pip']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.pybind11 - * CM names: `--adr.['pip-package', 'pybind11']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.setuptools - * CM names: `--adr.['pip-package', 'setuputils']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen/_cm.yaml) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen/_cm.yaml) - 1. 
***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-loadgen/_cm.yaml) - -___ -### Script output -`cmr "get loadgen inference inference-loadgen mlperf mlcommons [,variations]" -j` -#### New environment keys (filter) - -* `+CPLUS_INCLUDE_PATH` -* `+C_INCLUDE_PATH` -* `+DYLD_FALLBACK_LIBRARY_PATH` -* `+LD_LIBRARY_PATH` -* `+PYTHONPATH` -* `CM_MLPERF_INFERENCE_LOADGEN_*` -#### New environment keys auto-detected from customize - -* `CM_MLPERF_INFERENCE_LOADGEN_INCLUDE_PATH` -* `CM_MLPERF_INFERENCE_LOADGEN_INSTALL_PATH` -* `CM_MLPERF_INFERENCE_LOADGEN_LIBRARY_PATH` -* `CM_MLPERF_INFERENCE_LOADGEN_PYTHON_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-common-code/index.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-common-code/index.md deleted file mode 100644 index 81faf43e8..000000000 --- a/docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-common-code/index.md +++ /dev/null @@ -1,150 +0,0 @@ -Automatically generated README for this automation recipe: **get-mlperf-inference-nvidia-common-code** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-nvidia-common-code,26b78bf3ffdc4926) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-common-code)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,nvidia,mlperf,inference,common-code* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get nvidia mlperf inference common-code" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,nvidia,mlperf,inference,common-code` - -`cm run script --tags=get,nvidia,mlperf,inference,common-code[,variations] ` - -*or* - -`cmr "get nvidia mlperf inference common-code"` - -`cmr "get nvidia mlperf inference common-code [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,nvidia,mlperf,inference,common-code' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,nvidia,mlperf,inference,common-code"``` - -#### Run this script via Docker (beta) - -`cm docker script "get nvidia mlperf inference common-code[variations]" ` - -___ -### Customization - - -#### Variations - - * Group "**repo-owner**" -
    - Click here to expand this section. - - * `_ctuning` - - Workflow: - * `_custom` - - Workflow: - * `_mlcommons` - - Workflow: - * `_nvidia-only` - - Workflow: - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `r3.1` - -* `r2.1` -* `r3.0` -* `r3.1` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-common-code/_cm.json)*** - * get,mlperf,inference,results - * CM names: `--adr.['mlperf-inference-results']...` - - CM script: [get-mlperf-inference-results](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results) - - CM script: [get-mlperf-inference-results-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results-dir) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-common-code/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-common-code/_cm.json) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-common-code/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-common-code/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-common-code/_cm.json) - -___ -### Script output -`cmr "get nvidia mlperf inference common-code [,variations]" -j` -#### New environment keys (filter) - -* `+PYTHONPATH` -* `CM_MLPERF_INFERENCE_NVIDIA_CODE_PATH` -#### New environment keys auto-detected from customize - -* `CM_MLPERF_INFERENCE_NVIDIA_CODE_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-scratch-space/index.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-scratch-space/index.md deleted file mode 100644 index 845f71038..000000000 --- a/docs/MLPerf-benchmark-support/get-mlperf-inference-nvidia-scratch-space/index.md +++ /dev/null @@ -1,162 +0,0 @@ -Automatically generated README for this automation recipe: **get-mlperf-inference-nvidia-scratch-space** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-nvidia-scratch-space,0b2bec8b29fb4ab7) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,mlperf,inference,nvidia,scratch,space* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get mlperf inference nvidia scratch space" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,mlperf,inference,nvidia,scratch,space` - -`cm run script --tags=get,mlperf,inference,nvidia,scratch,space[,variations] [--input_flags]` - -*or* - -`cmr "get mlperf inference nvidia scratch space"` - -`cmr "get mlperf inference nvidia scratch space [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,mlperf,inference,nvidia,scratch,space' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,mlperf,inference,nvidia,scratch,space"``` - -#### Run this script via Docker (beta) - -`cm docker script "get mlperf inference nvidia scratch space[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * Group "**version**" -
    - Click here to expand this section. - - * `_version.#` - - Environment variables: - - *CM_NVIDIA_SCRATCH_SPACE_VERSION*: `#` - - Workflow: - * **`_version.4_0`** (default) - - Environment variables: - - *CM_NVIDIA_SCRATCH_SPACE_VERSION*: `4_0` - - Workflow: - -
    - - -#### Default variations - -`_version.4_0` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--scratch_path=value` → `CM_NVIDIA_MLPERF_SCRATCH_PATH=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "scratch_path":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-nvidia-scratch-space/_cm.json) - -___ -### Script output -`cmr "get mlperf inference nvidia scratch space [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_NVIDIA_MLPERF_SCRATCH_PATH` -* `CM_NVIDIA_SCRATCH_SPACE_VERSION` -* `MLPERF_SCRATCH_PATH` -#### New environment keys auto-detected from customize - -* `CM_NVIDIA_MLPERF_SCRATCH_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-results-dir/index.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-results-dir/index.md deleted file mode 100644 index 13e935209..000000000 --- a/docs/MLPerf-benchmark-support/get-mlperf-inference-results-dir/index.md +++ /dev/null @@ -1,159 +0,0 @@ -Automatically generated README for this automation recipe: **get-mlperf-inference-results-dir** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-results-dir,84f3c5aad5e1444b) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results-dir)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,mlperf,inference,results,dir,directory* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get mlperf inference results dir directory" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,mlperf,inference,results,dir,directory` - -`cm run script --tags=get,mlperf,inference,results,dir,directory[,variations] [--input_flags]` - -*or* - -`cmr "get mlperf inference results dir directory"` - -`cmr "get mlperf inference results dir directory [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,mlperf,inference,results,dir,directory' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,mlperf,inference,results,dir,directory"``` - -#### Run this script via Docker (beta) - -`cm docker script "get mlperf inference results dir directory[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * Group "**version**" -
    - Click here to expand this section. - - * `_version.#` - - Environment variables: - - *CM_MLPERF_INFERENCE_RESULTS_VERSION*: `#` - - Workflow: - * **`_version.4_0`** (default) - - Environment variables: - - *CM_MLPERF_INFERENCE_RESULTS_VERSION*: `4_0` - - Workflow: - -
    - - -#### Default variations - -`_version.4_0` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--results_dir=value` → `CM_MLPERF_INFERENCE_RESULTS_DIR=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "results_dir":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results-dir/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results-dir/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results-dir/_cm.json) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results-dir/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results-dir/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results-dir/_cm.json) - -___ -### Script output -`cmr "get mlperf inference results dir directory [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_MLPERF_INFERENCE_RESULTS_DIR` -* `CM_MLPERF_INFERENCE_RESULTS_VERSION` -#### New environment keys auto-detected from customize - -* `CM_MLPERF_INFERENCE_RESULTS_DIR` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-results/index.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-results/index.md deleted file mode 100644 index 75b92569e..000000000 --- a/docs/MLPerf-benchmark-support/get-mlperf-inference-results/index.md +++ /dev/null @@ -1,163 +0,0 @@ -Automatically generated README for this automation recipe: **get-mlperf-inference-results** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online 
info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-results,36bae5b25dbe41da) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,results,inference,inference-results,mlcommons,mlperf* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get results inference inference-results mlcommons mlperf" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,results,inference,inference-results,mlcommons,mlperf` - -`cm run script --tags=get,results,inference,inference-results,mlcommons,mlperf[,variations] ` - -*or* - -`cmr "get results inference inference-results mlcommons mlperf"` - -`cmr "get results inference inference-results mlcommons mlperf [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,results,inference,inference-results,mlcommons,mlperf' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,results,inference,inference-results,mlcommons,mlperf"``` - -#### Run this script via Docker (beta) - -`cm docker script "get results inference inference-results mlcommons mlperf[variations]" ` - -___ -### Customization - - -#### Variations - - * Group "**source-repo**" -
    - Click here to expand this section. - - * `_ctuning` - - Environment variables: - - *GITHUB_REPO_OWNER*: `ctuning` - - Workflow: - * `_custom` - - Environment variables: - - *GITHUB_REPO_OWNER*: `arjunsuresh` - - Workflow: - * **`_mlcommons`** (default) - - Environment variables: - - *GITHUB_REPO_OWNER*: `mlcommons` - - Workflow: - * `_nvidia-only` - - Environment variables: - - *GITHUB_REPO_OWNER*: `GATEOverflow` - - *NVIDIA_ONLY*: `yes` - - Workflow: - -
    - - -#### Default variations - -`_mlcommons` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_GIT_CHECKOUT: `master` -* CM_GIT_DEPTH: `--depth 1` -* CM_GIT_PATCH: `no` - -
    - -#### Versions -Default version: `v3.1` - -* `v2.1` -* `v3.0` -* `v3.1` -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results/_cm.json)*** - * get,git,repo - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-results/_cm.json) - -___ -### Script output -`cmr "get results inference inference-results mlcommons mlperf [,variations]" -j` -#### New environment keys (filter) - -* `CM_MLPERF_INFERENCE_RESULTS_*` -#### New environment keys auto-detected from customize - -* `CM_MLPERF_INFERENCE_RESULTS_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-src/index.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-src/index.md deleted file mode 100644 index abc42a7a3..000000000 --- a/docs/MLPerf-benchmark-support/get-mlperf-inference-src/index.md +++ /dev/null @@ -1,266 +0,0 @@ -Automatically generated README for this automation recipe: **get-mlperf-inference-src** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-src,4b57186581024797) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-src)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,src,source,inference,inference-src,inference-source,mlperf,mlcommons* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get src source inference inference-src inference-source mlperf mlcommons" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,src,source,inference,inference-src,inference-source,mlperf,mlcommons` - -`cm run script --tags=get,src,source,inference,inference-src,inference-source,mlperf,mlcommons[,variations] ` - -*or* - -`cmr "get src source inference inference-src inference-source mlperf mlcommons"` - -`cmr "get src source inference inference-src inference-source mlperf mlcommons [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,src,source,inference,inference-src,inference-source,mlperf,mlcommons' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,src,source,inference,inference-src,inference-source,mlperf,mlcommons"``` - -#### Run this script via Docker (beta) - -`cm docker script "get src source inference inference-src inference-source mlperf mlcommons[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_3d-unet` - - Environment variables: - - *CM_SUBMODULE_3D_UNET*: `yes` - - Workflow: - * `_deeplearningexamples` - - Environment variables: - - *CM_SUBMODULE_DEEPLEARNINGEXAMPLES*: `yes` - - Workflow: - * `_deepsparse` - - Environment variables: - - *CM_GIT_CHECKOUT*: `deepsparse` - - *CM_GIT_URL*: `https://github.com/neuralmagic/inference` - - *CM_MLPERF_LAST_RELEASE*: `v4.0` - - Workflow: - * `_gn` - - Environment variables: - - *CM_SUBMODULE_GN*: `yes` - - Workflow: - * `_no-recurse-submodules` - - Environment variables: - - *CM_GIT_RECURSE_SUBMODULES*: `` - - Workflow: - * `_nvidia-pycocotools` - - Environment variables: - - *CM_GIT_PATCH_FILENAME*: `coco.patch` - - Workflow: - * `_octoml` - - Environment variables: - - *CM_GIT_URL*: `https://github.com/octoml/inference` - - Workflow: - * `_openimages-nvidia-pycocotools` - - Environment variables: - - *CM_GIT_PATCH_FILENAME*: `openimages-pycocotools.patch` - - Workflow: - * `_patch` - - Environment variables: - - *CM_GIT_PATCH*: `yes` - - Workflow: - * `_pybind` - - Environment variables: - - *CM_SUBMODULE_PYBIND*: `yes` - - Workflow: - * `_recurse-submodules` - - Environment variables: - - *CM_GIT_RECURSE_SUBMODULES*: ` --recurse-submodules` - - Workflow: - * `_repo.#` - - Environment variables: - - *CM_GIT_URL*: `#` - - Workflow: - * `_submodules.#` - - Environment variables: - - *CM_GIT_SUBMODULES*: `#` - - Workflow: - -
    - - - * Group "**checkout**" -
    - Click here to expand this section. - - * `_branch.#` - - Environment variables: - - *CM_GIT_CHECKOUT*: `#` - - Workflow: - * `_sha.#` - - Environment variables: - - *CM_GIT_SHA*: `#` - - Workflow: - -
    - - - * Group "**git-history**" -
    - Click here to expand this section. - - * `_full-history` - - Environment variables: - - *CM_GIT_DEPTH*: `` - - Workflow: - * **`_short-history`** (default) - - Environment variables: - - *CM_GIT_DEPTH*: `--depth 10` - - Workflow: - -
    - - -#### Default variations - -`_short-history` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_GIT_CHECKOUT_FOLDER: `inference` -* CM_GIT_DEPTH: `--depth 4` -* CM_GIT_PATCH: `no` -* CM_GIT_RECURSE_SUBMODULES: `` -* CM_GIT_URL: `https://github.com/mlcommons/inference.git` - -
    - -#### Versions -Default version: `master` - -* `custom` -* `deepsparse` -* `main` -* `master` -* `pybind_fix` -* `r2.1` -* `r3.0` -* `r3.1` -* `tvm` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-src/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-src/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-src/_cm.json)*** - * get,git,repo - * CM names: `--adr.['inference-git-repo']...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-src/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-src/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-src/_cm.json) - -___ -### Script output -`cmr "get src source inference inference-src inference-source mlperf mlcommons [,variations]" -j` -#### New environment keys (filter) - -* `+PYTHONPATH` -* `CM_MLPERF_INFERENCE_3DUNET_PATH` -* `CM_MLPERF_INFERENCE_BERT_PATH` -* `CM_MLPERF_INFERENCE_CLASSIFICATION_AND_DETECTION_PATH` -* `CM_MLPERF_INFERENCE_CONF_PATH` -* `CM_MLPERF_INFERENCE_DLRM_PATH` -* `CM_MLPERF_INFERENCE_DLRM_V2_PATH` -* `CM_MLPERF_INFERENCE_GPTJ_PATH` -* `CM_MLPERF_INFERENCE_RNNT_PATH` -* `CM_MLPERF_INFERENCE_SOURCE` -* `CM_MLPERF_INFERENCE_VERSION` -* `CM_MLPERF_INFERENCE_VISION_PATH` -* `CM_MLPERF_LAST_RELEASE` -#### New environment keys auto-detected from customize - -* `CM_MLPERF_INFERENCE_3DUNET_PATH` -* `CM_MLPERF_INFERENCE_BERT_PATH` -* `CM_MLPERF_INFERENCE_CLASSIFICATION_AND_DETECTION_PATH` -* `CM_MLPERF_INFERENCE_CONF_PATH` -* `CM_MLPERF_INFERENCE_DLRM_PATH` -* `CM_MLPERF_INFERENCE_DLRM_V2_PATH` -* `CM_MLPERF_INFERENCE_GPTJ_PATH` -* `CM_MLPERF_INFERENCE_RNNT_PATH` -* `CM_MLPERF_INFERENCE_VISION_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-submission-dir/index.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-submission-dir/index.md deleted file mode 100644 index 94a3aa684..000000000 --- a/docs/MLPerf-benchmark-support/get-mlperf-inference-submission-dir/index.md +++ /dev/null @@ -1,159 +0,0 @@ -Automatically generated README for this automation recipe: **get-mlperf-inference-submission-dir** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-submission-dir,ddf36a41d6934a7e) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-submission-dir)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,mlperf,inference,submission,dir,directory* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get mlperf inference submission dir directory" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,mlperf,inference,submission,dir,directory` - -`cm run script --tags=get,mlperf,inference,submission,dir,directory[,variations] [--input_flags]` - -*or* - -`cmr "get mlperf inference submission dir directory"` - -`cmr "get mlperf inference submission dir directory [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,mlperf,inference,submission,dir,directory' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,mlperf,inference,submission,dir,directory"``` - -#### Run this script via Docker (beta) - -`cm docker script "get mlperf inference submission dir directory[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * Group "**version**" -
    - Click here to expand this section. - - * `_version.#` - - Environment variables: - - *CM_MLPERF_INFERENCE_SUBMISSION_VERSION*: `#` - - Workflow: - * **`_version.4_0`** (default) - - Environment variables: - - *CM_MLPERF_INFERENCE_SUBMISSION_VERSION*: `4_0` - - Workflow: - -
    - - -#### Default variations - -`_version.4_0` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--submission_dir=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "submission_dir":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-submission-dir/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-submission-dir/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-submission-dir/_cm.json) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-submission-dir/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-submission-dir/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-submission-dir/_cm.json) - -___ -### Script output -`cmr "get mlperf inference submission dir directory [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_MLPERF_INFERENCE_SUBMISSION_DIR` -* `CM_MLPERF_INFERENCE_SUBMISSION_VERSION` -#### New environment keys auto-detected from customize - -* `CM_MLPERF_INFERENCE_SUBMISSION_DIR` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-sut-configs/index.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-sut-configs/index.md deleted file mode 100644 index 9d5a01f19..000000000 --- a/docs/MLPerf-benchmark-support/get-mlperf-inference-sut-configs/index.md +++ /dev/null @@ -1,161 +0,0 @@ -Automatically generated README for this automation recipe: **get-mlperf-inference-sut-configs** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and 
Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-sut-configs,c2fbf72009e2445b) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-configs)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,mlperf,inference,sut,configs,sut-configs* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get mlperf inference sut configs sut-configs" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,mlperf,inference,sut,configs,sut-configs` - -`cm run script --tags=get,mlperf,inference,sut,configs,sut-configs[,variations] [--input_flags]` - -*or* - -`cmr "get mlperf inference sut configs sut-configs"` - -`cmr "get mlperf inference sut configs sut-configs [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script 
from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,mlperf,inference,sut,configs,sut-configs' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,mlperf,inference,sut,configs,sut-configs"``` - -#### Run this script via Docker (beta) - -`cm docker script "get mlperf inference sut configs sut-configs[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_octoml` - - Environment variables: - - *CM_SUT_USE_EXTERNAL_CONFIG_REPO*: `yes` - - *CM_GIT_CHECKOUT_FOLDER*: `configs` - - *CM_GIT_URL*: `https://github.com/arjunsuresh/mlperf-inference-configs` - - Workflow: - 1. ***Read "prehook_deps" on other CM scripts*** - * get,git,repo,_repo.mlperf_inference_configs_octoml - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - -
    - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--configs_git_url=value` → `CM_GIT_URL=value` -* `--repo_path=value` → `CM_SUT_CONFIGS_PATH=value` -* `--run_config=value` → `CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "configs_git_url":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_SUT_CONFIGS_PATH: `` -* CM_GIT_URL: `` - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-configs/_cm.json) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-configs/_cm.json) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-configs/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-configs/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-configs/_cm.json) - -___ -### Script output -`cmr "get mlperf inference sut configs sut-configs [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_HW_*` -* `CM_SUT_*` -#### New environment keys auto-detected from customize - -* `CM_HW_NAME` -* `CM_SUT_NAME` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-inference-sut-description/index.md b/docs/MLPerf-benchmark-support/get-mlperf-inference-sut-description/index.md deleted file mode 100644 index 4f2f559a5..000000000 --- a/docs/MLPerf-benchmark-support/get-mlperf-inference-sut-description/index.md +++ /dev/null @@ -1,159 +0,0 @@ -Automatically generated README for this automation recipe: **get-mlperf-inference-sut-description** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-inference-sut-description,e49a3f758b2d4e7b) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-description)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,mlperf,sut,description,system-under-test,system-description* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get mlperf sut description system-under-test system-description" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,mlperf,sut,description,system-under-test,system-description` - -`cm run script --tags=get,mlperf,sut,description,system-under-test,system-description [--input_flags]` - -*or* - -`cmr "get mlperf sut description system-under-test system-description"` - -`cmr "get mlperf sut description system-under-test system-description " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,mlperf,sut,description,system-under-test,system-description' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,mlperf,sut,description,system-under-test,system-description"``` - -#### Run this script via Docker (beta) - -`cm docker script "get mlperf sut description system-under-test system-description" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--name=value` → `CM_HW_NAME=value` -* `--submitter=value` → `CM_MLPERF_SUBMITTER=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "name":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_SUT_DESC_CACHE: `no` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-description/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,python3 - * CM names: `--adr.['python3', 'python']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,compiler - * CM names: `--adr.['compiler']...` - - CM script: [get-cl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cl) - - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) - - CM script: [get-llvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-llvm) - * get,cuda-devices - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_DEVICE': ['gpu', 'cuda']}` - - CM script: [get-cuda-devices](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda-devices) - * detect,sudo - * Enable this dependency only if all ENV vars are set:
    -`{'CM_DETERMINE_MEMORY_CONFIGURATION': ['yes'], 'CM_HOST_OS_TYPE': ['linux']}` - - CM script: [detect-sudo](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-sudo) - * get,generic-python-lib,_package.dmiparser - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-description/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-description/_cm.json) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-description/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-description/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-inference-sut-description/_cm.json) - -___ -### Script output -`cmr "get mlperf sut description system-under-test system-description " [--input_flags] -j` -#### New environment keys (filter) - -* `CM_HW_*` -* `CM_SUT_*` -#### New environment keys auto-detected from customize - -* `CM_HW_NAME` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-logging/index.md b/docs/MLPerf-benchmark-support/get-mlperf-logging/index.md deleted file mode 100644 index 02dea1217..000000000 --- a/docs/MLPerf-benchmark-support/get-mlperf-logging/index.md +++ /dev/null @@ -1,127 +0,0 @@ -Automatically generated README for this automation recipe: **get-mlperf-logging** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-logging,c9830dc6f87b4dc6) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-logging)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,mlperf,logging,mlperf-logging* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get mlperf logging mlperf-logging" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,mlperf,logging,mlperf-logging` - -`cm run script --tags=get,mlperf,logging,mlperf-logging ` - -*or* - -`cmr "get mlperf logging mlperf-logging"` - -`cmr "get mlperf logging mlperf-logging " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,mlperf,logging,mlperf-logging' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,mlperf,logging,mlperf-logging"``` - -#### Run this script via Docker (beta) - -`cm docker script "get mlperf logging mlperf-logging" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-logging/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,git,repo,_repo.https://github.com/mlcommons/logging - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-logging/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-logging/_cm.json) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-logging/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-logging/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-logging/_cm.json) - -___ -### Script output -`cmr "get mlperf logging mlperf-logging " -j` -#### New environment keys (filter) - -* `+PYTHONPATH` -* `CM_MLPERF_LOGGING_*` -#### New environment keys auto-detected from customize - -* `CM_MLPERF_LOGGING_SRC_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-power-dev/index.md b/docs/MLPerf-benchmark-support/get-mlperf-power-dev/index.md deleted file mode 100644 index 0df4b325b..000000000 --- a/docs/MLPerf-benchmark-support/get-mlperf-power-dev/index.md +++ /dev/null @@ -1,171 +0,0 @@ -Automatically generated README for this automation recipe: **get-mlperf-power-dev** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-power-dev,72aa56768c994bcf) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-power-dev)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,src,source,power,power-dev,mlperf,mlcommons* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get src source power power-dev mlperf mlcommons" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,src,source,power,power-dev,mlperf,mlcommons` - -`cm run script --tags=get,src,source,power,power-dev,mlperf,mlcommons[,variations] ` - -*or* - -`cmr "get src source power power-dev mlperf mlcommons"` - -`cmr "get src source power power-dev mlperf mlcommons [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,src,source,power,power-dev,mlperf,mlcommons' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,src,source,power,power-dev,mlperf,mlcommons"``` - -#### Run this script via Docker (beta) - -`cm docker script "get src source power power-dev mlperf mlcommons[variations]" ` - -___ -### Customization - - -#### Variations - - * Group "**checkout**" -
    - Click here to expand this section. - - * `_branch.#` - - Environment variables: - - *CM_GIT_CHECKOUT*: `#` - - Workflow: - * `_sha.#` - - Environment variables: - - *CM_GIT_SHA*: `#` - - Workflow: - * `_tag.#` - - Environment variables: - - *CM_GIT_CHECKOUT_TAG*: `#` - - Workflow: - -
    - - - * Group "**repo**" -
    - Click here to expand this section. - - * **`_mlcommons`** (default) - - Environment variables: - - *CM_GIT_URL*: `https://github.com/mlcommons/power-dev.git` - - Workflow: - * `_octoml` - - Environment variables: - - *CM_GIT_URL*: `https://github.com/octoml/power-dev.git` - - Workflow: - * `_repo.#` - - Environment variables: - - *CM_GIT_URL*: `#` - - Workflow: - -
    - - -#### Default variations - -`_mlcommons` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_GIT_DEPTH: `--depth 1` -* CM_GIT_PATCH: `no` -* CM_GIT_CHECKOUT_FOLDER: `power-dev` - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-power-dev/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-power-dev/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-power-dev/_cm.json)*** - * get,git,repo - * CM names: `--adr.['mlperf-power-dev-git-repo']...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-power-dev/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-power-dev/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-power-dev/_cm.json) - -___ -### Script output -`cmr "get src source power power-dev mlperf mlcommons [,variations]" -j` -#### New environment keys (filter) - -* `CM_MLPERF_POWER_SOURCE` -#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/get-mlperf-tiny-eembc-energy-runner-src/index.md b/docs/MLPerf-benchmark-support/get-mlperf-tiny-eembc-energy-runner-src/index.md deleted file mode 100644 index f06d6d2aa..000000000 --- a/docs/MLPerf-benchmark-support/get-mlperf-tiny-eembc-energy-runner-src/index.md +++ /dev/null @@ -1,129 +0,0 @@ -Automatically generated README for this automation recipe: **get-mlperf-tiny-eembc-energy-runner-src** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and 
Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-tiny-eembc-energy-runner-src,c7da8d1ce4164a4b) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,src,source,eembc,energyrunner,energy-runner,eembc-energy-runner,tinymlperf-energy-runner* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get src source eembc energyrunner energy-runner eembc-energy-runner tinymlperf-energy-runner" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,src,source,eembc,energyrunner,energy-runner,eembc-energy-runner,tinymlperf-energy-runner` - -`cm run script --tags=get,src,source,eembc,energyrunner,energy-runner,eembc-energy-runner,tinymlperf-energy-runner ` - -*or* - -`cmr "get src source eembc energyrunner energy-runner eembc-energy-runner tinymlperf-energy-runner"` - -`cmr "get src source eembc energyrunner energy-runner eembc-energy-runner tinymlperf-energy-runner " ` - - -#### Run this script 
from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'get,src,source,eembc,energyrunner,energy-runner,eembc-energy-runner,tinymlperf-energy-runner', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,src,source,eembc,energyrunner,energy-runner,eembc-energy-runner,tinymlperf-energy-runner"``` - -#### Run this script via Docker (beta) - -`cm docker script "get src source eembc energyrunner energy-runner eembc-energy-runner tinymlperf-energy-runner" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_GIT_CHECKOUT: `main` -* CM_GIT_PATCH: `no` -* CM_GIT_RECURSE_SUBMODULES: `` -* CM_GIT_URL: `https://github.com/eembc/energyrunner` - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-eembc-energy-runner-src/_cm.json) - -___ -### Script output -`cmr "get src source eembc energyrunner energy-runner eembc-energy-runner tinymlperf-energy-runner " -j` -#### New environment keys (filter) - -* `+PYTHONPATH` -* `CM_EEMBC_ENERGY_RUNNER_*` -#### New environment keys auto-detected from customize - -* `CM_EEMBC_ENERGY_RUNNER_DATASETS` -* `CM_EEMBC_ENERGY_RUNNER_SESSIONS` -* `CM_EEMBC_ENERGY_RUNNER_SRC` -* `CM_EEMBC_ENERGY_RUNNER_SRC_DATASETS` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-tiny-src/index.md b/docs/MLPerf-benchmark-support/get-mlperf-tiny-src/index.md deleted file mode 100644 index 7706dac15..000000000 --- a/docs/MLPerf-benchmark-support/get-mlperf-tiny-src/index.md +++ /dev/null @@ -1,143 +0,0 @@ -Automatically generated README for this automation recipe: **get-mlperf-tiny-src** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-tiny-src,777843a0bb034524) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,src,source,tiny,tiny-src,tiny-source,tinymlperf,tinymlperf-src,mlperf,mlcommons* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get src source tiny tiny-src tiny-source tinymlperf tinymlperf-src mlperf mlcommons" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,src,source,tiny,tiny-src,tiny-source,tinymlperf,tinymlperf-src,mlperf,mlcommons` - -`cm run script --tags=get,src,source,tiny,tiny-src,tiny-source,tinymlperf,tinymlperf-src,mlperf,mlcommons ` - -*or* - -`cmr "get src source tiny tiny-src tiny-source tinymlperf tinymlperf-src mlperf mlcommons"` - -`cmr "get src source tiny tiny-src tiny-source tinymlperf tinymlperf-src mlperf mlcommons " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'get,src,source,tiny,tiny-src,tiny-source,tinymlperf,tinymlperf-src,mlperf,mlcommons', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,src,source,tiny,tiny-src,tiny-source,tinymlperf,tinymlperf-src,mlperf,mlcommons"``` - -#### Run this script via Docker (beta) - -`cm docker script "get src source tiny tiny-src tiny-source tinymlperf tinymlperf-src mlperf mlcommons" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_GIT_CHECKOUT: `master` -* CM_GIT_PATCH: `no` -* CM_GIT_RECURSE_SUBMODULES: `` -* CM_GIT_URL: `https://github.com/mlcommons/tiny.git` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-tiny-src/_cm.json) - -___ -### Script output -`cmr "get src source tiny tiny-src tiny-source tinymlperf tinymlperf-src mlperf mlcommons " -j` -#### New environment keys (filter) - -* `+PYTHONPATH` -* `CM_MLPERF_TINY_*` -#### New environment keys auto-detected from customize - -* `CM_MLPERF_TINY_BENCHMARK` -* `CM_MLPERF_TINY_DATASETS` -* `CM_MLPERF_TINY_DATASETS_AD` -* `CM_MLPERF_TINY_DATASETS_IC` -* `CM_MLPERF_TINY_DATASETS_KWS` -* `CM_MLPERF_TINY_DATASETS_KWS_OPEN` -* `CM_MLPERF_TINY_DATASETS_VWW` -* `CM_MLPERF_TINY_SRC` -* `CM_MLPERF_TINY_TRAINING` -* `CM_MLPERF_TINY_TRAINING_AD` -* `CM_MLPERF_TINY_TRAINING_IC` -* `CM_MLPERF_TINY_TRAINING_KWS` -* `CM_MLPERF_TINY_TRAINING_VWW` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-training-nvidia-code/index.md b/docs/MLPerf-benchmark-support/get-mlperf-training-nvidia-code/index.md deleted file mode 100644 index e29373502..000000000 --- a/docs/MLPerf-benchmark-support/get-mlperf-training-nvidia-code/index.md +++ /dev/null @@ -1,158 +0,0 @@ -Automatically generated README for this automation recipe: **get-mlperf-training-nvidia-code** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-training-nvidia-code,fdc630b1d41743c5) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-nvidia-code)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in 
above meta description): *get,nvidia,mlperf,training,code,training-code* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get nvidia mlperf training code training-code" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,nvidia,mlperf,training,code,training-code` - -`cm run script --tags=get,nvidia,mlperf,training,code,training-code[,variations] ` - -*or* - -`cmr "get nvidia mlperf training code training-code"` - -`cmr "get nvidia mlperf training code training-code [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'get,nvidia,mlperf,training,code,training-code', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,nvidia,mlperf,training,code,training-code"``` - -#### Run this script via Docker (beta) - -`cm docker script "get nvidia mlperf training code training-code[variations]" ` - -___ -### Customization - - -#### Variations - - * Group "**repo-owner**" -
    - Click here to expand this section. - - * `_ctuning` - - Environment variables: - - *CM_TMP_TRAINING_SRC*: `ctuning` - - Workflow: - * `_custom` - - Workflow: - * **`_mlcommons`** (default) - - Environment variables: - - *CM_TMP_TRAINING_SRC*: `mlcommons` - - Workflow: - * `_nvidia-only` - - Environment variables: - - *CM_TMP_TRAINING_SRC*: `GATEOverflow` - - Workflow: - -
    - - -#### Default variations - -`_mlcommons` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `r3.0` - -* `r2.1` -* `r3.0` -* `r3.1` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-nvidia-code/_cm.json)*** - * get,git,repo - * CM names: `--adr.['mlperf-training-results']...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-nvidia-code/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-nvidia-code/_cm.json) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-nvidia-code/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-nvidia-code/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-nvidia-code/_cm.json) - -___ -### Script output -`cmr "get nvidia mlperf training code training-code [,variations]" -j` -#### New environment keys (filter) - -* `CM_MLPERF_TRAINING_NVIDIA_CODE_PATH` -#### New environment keys auto-detected from customize - -* `CM_MLPERF_TRAINING_NVIDIA_CODE_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/get-mlperf-training-src/index.md b/docs/MLPerf-benchmark-support/get-mlperf-training-src/index.md deleted file mode 100644 index aaecc7818..000000000 --- a/docs/MLPerf-benchmark-support/get-mlperf-training-src/index.md +++ /dev/null @@ -1,224 +0,0 @@ -Automatically generated README for this automation recipe: **get-mlperf-training-src** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-mlperf-training-src,dc440bd88e794a28) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-src)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,src,source,training,training-src,training-source,mlperf,mlcommons* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get src source training training-src training-source mlperf mlcommons" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,src,source,training,training-src,training-source,mlperf,mlcommons` - -`cm run script --tags=get,src,source,training,training-src,training-source,mlperf,mlcommons[,variations] ` - -*or* - -`cmr "get src source training training-src training-source mlperf mlcommons"` - -`cmr "get src source training training-src training-source mlperf mlcommons [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'get,src,source,training,training-src,training-source,mlperf,mlcommons', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,src,source,training,training-src,training-source,mlperf,mlcommons"``` - -#### Run this script via Docker (beta) - -`cm docker script "get src source training training-src training-source mlperf mlcommons[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_no-recurse-submodules` - - Environment variables: - - *CM_GIT_RECURSE_SUBMODULES*: `` - - Workflow: - * `_nvidia-retinanet` - - Environment variables: - - *CM_GIT_PATCH_FILENAMES*: `nvidia-retinanet.patch,cpu_load.patch` - - Workflow: - * `_patch` - - Environment variables: - - *CM_GIT_PATCH*: `yes` - - Workflow: - -
    - - - * Group "**checkout**" -
    - Click here to expand this section. - - * `_branch.#` - - Environment variables: - - *CM_GIT_CHECKOUT*: `#` - - Workflow: - * `_sha.#` - - Environment variables: - - *CM_GIT_SHA*: `#` - - Workflow: - * `_tag.#` - - Environment variables: - - *CM_GIT_CHECKOUT_TAG*: `#` - - Workflow: - -
    - - - * Group "**git-history**" -
    - Click here to expand this section. - - * `_full-history` - - Environment variables: - - *CM_GIT_DEPTH*: `` - - Workflow: - * **`_short-history`** (default) - - Environment variables: - - *CM_GIT_DEPTH*: `--depth 5` - - Workflow: - -
    - - - * Group "**repo**" -
    - Click here to expand this section. - - * `_repo.#` - - Environment variables: - - *CM_GIT_URL*: `#` - - Workflow: - -
    - - - * Group "**src**" -
    - Click here to expand this section. - - * **`_cknowledge`** (default) - - Environment variables: - - *CM_GIT_URL*: `https://github.com/cknowledge/training.git` - - Workflow: - * `_mlcommons` - - Environment variables: - - *CM_GIT_URL*: `https://github.com/mlcommons/training.git` - - Workflow: - -
    - - -#### Default variations - -`_cknowledge,_short-history` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_GIT_CHECKOUT: `master` -* CM_GIT_DEPTH: `--depth 4` -* CM_GIT_PATCH: `no` -* CM_GIT_RECURSE_SUBMODULES: ` --recurse-submodules` -* CM_GIT_CHECKOUT_FOLDER: `training` - -
    - -#### Versions -Default version: `master` - -* `custom` -* `master` -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-src/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-src/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-src/_cm.json)*** - * get,git,repo - * CM names: `--adr.['mlperf-training-repo']...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-src/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-src/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-mlperf-training-src/_cm.json) - -___ -### Script output -`cmr "get src source training training-src training-source mlperf mlcommons [,variations]" -j` -#### New environment keys (filter) - -* `+PYTHONPATH` -* `CM_MLPERF_TRAINING_*` -* `CM_MLPERF_TRAINING_LAST_RELEASE` -#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/get-nvidia-mitten/index.md b/docs/MLPerf-benchmark-support/get-nvidia-mitten/index.md deleted file mode 100644 index f6467da3f..000000000 --- a/docs/MLPerf-benchmark-support/get-nvidia-mitten/index.md +++ /dev/null @@ -1,132 +0,0 @@ -Automatically generated README for this automation recipe: **get-nvidia-mitten** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-nvidia-mitten,1c045f2902374de9) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,nvidia,mitten,nvidia-mitten* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get nvidia mitten nvidia-mitten" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,nvidia,mitten,nvidia-mitten` - -`cm run script --tags=get,nvidia,mitten,nvidia-mitten ` - -*or* - -`cmr "get nvidia mitten nvidia-mitten"` - -`cmr "get nvidia mitten nvidia-mitten " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'get,nvidia,mitten,nvidia-mitten', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,nvidia,mitten,nvidia-mitten"``` - -#### Run this script via Docker (beta) - -`cm docker script "get nvidia mitten nvidia-mitten" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `master` - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,python3 - * CM names: `--adr.['python3', 'python']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,generic-python-lib,_pycuda - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,git,_repo.https://github.com/NVIDIA/mitten - * CM names: `--adr.['nvidia-mitten-git-src']...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-nvidia-mitten/_cm.json) - -___ -### Script output -`cmr "get nvidia mitten nvidia-mitten " -j` -#### New environment keys (filter) - -* `CM_NVIDIA_MITTEN*` -#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/get-spec-ptd/index.md b/docs/MLPerf-benchmark-support/get-spec-ptd/index.md deleted file mode 100644 index 5c2797227..000000000 --- a/docs/MLPerf-benchmark-support/get-spec-ptd/index.md +++ /dev/null @@ -1,164 +0,0 @@ -Automatically generated README for this automation recipe: **get-spec-ptd** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-spec-ptd,7423a878e4524136) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-spec-ptd)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,spec,ptd,ptdaemon,power,daemon,power-daemon,mlperf,mlcommons* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get spec ptd ptdaemon power daemon power-daemon mlperf mlcommons" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,spec,ptd,ptdaemon,power,daemon,power-daemon,mlperf,mlcommons` - -`cm run script --tags=get,spec,ptd,ptdaemon,power,daemon,power-daemon,mlperf,mlcommons [--input_flags]` - -*or* - -`cmr "get spec ptd ptdaemon power daemon power-daemon mlperf mlcommons"` - -`cmr "get spec ptd ptdaemon power daemon power-daemon mlperf mlcommons " [--input_flags]` - - - -#### Input Flags - -* --**input**=Path to SPEC PTDaemon (Optional) - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "input":...} -``` -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'get,spec,ptd,ptdaemon,power,daemon,power-daemon,mlperf,mlcommons', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,spec,ptd,ptdaemon,power,daemon,power-daemon,mlperf,mlcommons"``` - -#### Run this script via Docker (beta) - -`cm docker script "get spec ptd ptdaemon power daemon power-daemon mlperf mlcommons" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--input=value` → `CM_INPUT=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "input":...}) -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_GIT_CHECKOUT: `main` -* CM_GIT_DEPTH: `--depth 1` -* CM_GIT_PATCH: `no` -* CM_GIT_RECURSE_SUBMODULES: ` ` -* CM_GIT_URL: `https://github.com/mlcommons/power.git` - -
    - -#### Versions -Default version: `main` - -* `custom` -* `main` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-spec-ptd/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,git,repo,_repo.https://github.com/mlcommons/power - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-spec-ptd/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-spec-ptd/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-spec-ptd/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-spec-ptd/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-spec-ptd/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-spec-ptd/_cm.json) - -___ -### Script output -`cmr "get spec ptd ptdaemon power daemon power-daemon mlperf mlcommons " [--input_flags] -j` -#### New environment keys (filter) - -* `CM_MLPERF_PTD_PATH` -* `CM_SPEC_PTD_PATH` -#### New environment keys auto-detected from customize - -* `CM_MLPERF_PTD_PATH` -* `CM_SPEC_PTD_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/import-mlperf-inference-to-experiment/index.md b/docs/MLPerf-benchmark-support/import-mlperf-inference-to-experiment/index.md deleted file mode 100644 index f7708790e..000000000 --- a/docs/MLPerf-benchmark-support/import-mlperf-inference-to-experiment/index.md +++ /dev/null @@ -1,152 +0,0 @@ -Automatically generated README for this automation recipe: **import-mlperf-inference-to-experiment** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Developers: [Grigori Fursin](https://cKnowledge.org/gfursin) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=import-mlperf-inference-to-experiment,72099fa962ea499c) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-inference-to-experiment)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *import,mlperf,inference,mlperf-inference,experiment,2experiment,to-experiment* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "import mlperf inference mlperf-inference experiment 2experiment to-experiment" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=import,mlperf,inference,mlperf-inference,experiment,2experiment,to-experiment` - -`cm run script --tags=import,mlperf,inference,mlperf-inference,experiment,2experiment,to-experiment[,variations] [--input_flags]` - -*or* - -`cmr "import mlperf inference mlperf-inference experiment 2experiment to-experiment"` - -`cmr "import mlperf inference mlperf-inference experiment 2experiment to-experiment [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'import,mlperf,inference,mlperf-inference,experiment,2experiment,to-experiment', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="import,mlperf,inference,mlperf-inference,experiment,2experiment,to-experiment"``` - -#### Run this script via Docker (beta) - -`cm docker script "import mlperf inference mlperf-inference experiment 2experiment to-experiment[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_skip_checker` - - Environment variables: - - *CM_SKIP_SUBMISSION_CHECKER*: `True` - - Workflow: - -
    - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--submitter=value` → `CM_MLPERF_SUBMITTER=value` -* `--target_repo=value` → `CM_IMPORT_MLPERF_INFERENCE_TARGET_REPO=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "submitter":...}) -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-inference-to-experiment/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-inference-to-experiment/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-inference-to-experiment/_cm.yaml) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-inference-to-experiment/_cm.yaml) - 1. Run "postprocess" function from customize.py - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-inference-to-experiment/_cm.yaml) - -___ -### Script output -`cmr "import mlperf inference mlperf-inference experiment 2experiment to-experiment [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/import-mlperf-tiny-to-experiment/index.md b/docs/MLPerf-benchmark-support/import-mlperf-tiny-to-experiment/index.md deleted file mode 100644 index 99d55bc71..000000000 --- a/docs/MLPerf-benchmark-support/import-mlperf-tiny-to-experiment/index.md +++ /dev/null @@ -1,135 +0,0 @@ -Automatically generated README for this automation recipe: **import-mlperf-tiny-to-experiment** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Developers: [Grigori Fursin](https://cKnowledge.org/gfursin) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=import-mlperf-tiny-to-experiment,83e3efd7611f469b) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-tiny-to-experiment)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *import,mlperf,tiny,mlperf-tiny,experiment,2experiment,to-experiment* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "import mlperf tiny mlperf-tiny experiment 2experiment to-experiment" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=import,mlperf,tiny,mlperf-tiny,experiment,2experiment,to-experiment` - -`cm run script --tags=import,mlperf,tiny,mlperf-tiny,experiment,2experiment,to-experiment [--input_flags]` - -*or* - -`cmr "import mlperf tiny mlperf-tiny experiment 2experiment to-experiment"` - -`cmr "import mlperf tiny mlperf-tiny experiment 2experiment to-experiment " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'import,mlperf,tiny,mlperf-tiny,experiment,2experiment,to-experiment', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="import,mlperf,tiny,mlperf-tiny,experiment,2experiment,to-experiment"``` - -#### Run this script via Docker (beta) - -`cm docker script "import mlperf tiny mlperf-tiny experiment 2experiment to-experiment" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--target_repo=value` → `CM_IMPORT_TINYMLPERF_TARGET_REPO=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "target_repo":...}) -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-tiny-to-experiment/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-tiny-to-experiment/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-tiny-to-experiment/_cm.yaml) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-tiny-to-experiment/_cm.yaml) - 1. Run "postprocess" function from customize.py - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-tiny-to-experiment/_cm.yaml) - -___ -### Script output -`cmr "import mlperf tiny mlperf-tiny experiment 2experiment to-experiment " [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/import-mlperf-training-to-experiment/index.md b/docs/MLPerf-benchmark-support/import-mlperf-training-to-experiment/index.md deleted file mode 100644 index e5f76bed7..000000000 --- a/docs/MLPerf-benchmark-support/import-mlperf-training-to-experiment/index.md +++ /dev/null @@ -1,141 +0,0 @@ -Automatically generated README for this automation recipe: **import-mlperf-training-to-experiment** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Developers: [Grigori Fursin](https://cKnowledge.org/gfursin) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=import-mlperf-training-to-experiment,b13d9b7337414f17) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-training-to-experiment)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *import,mlperf,training,mlperf-training,experiment,2experiment,to-experiment* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "import mlperf training mlperf-training experiment 2experiment to-experiment" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=import,mlperf,training,mlperf-training,experiment,2experiment,to-experiment` - -`cm run script --tags=import,mlperf,training,mlperf-training,experiment,2experiment,to-experiment [--input_flags]` - -*or* - -`cmr "import mlperf training mlperf-training experiment 2experiment to-experiment"` - -`cmr "import mlperf training mlperf-training experiment 2experiment to-experiment " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'import,mlperf,training,mlperf-training,experiment,2experiment,to-experiment', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="import,mlperf,training,mlperf-training,experiment,2experiment,to-experiment"``` - -#### Run this script via Docker (beta) - -`cm docker script "import mlperf training mlperf-training experiment 2experiment to-experiment" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--target_repo=value` → `CM_IMPORT_MLPERF_TRAINING_TARGET_REPO=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "target_repo":...}) -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-training-to-experiment/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,mlperf,logging - - CM script: [get-mlperf-logging](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-logging) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-training-to-experiment/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-training-to-experiment/_cm.yaml) - 1. ***Run native script if exists*** - * [run_mlperf_logger.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-training-to-experiment/run_mlperf_logger.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-training-to-experiment/_cm.yaml) - 1. Run "postrocess" function from customize.py - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/import-mlperf-training-to-experiment/_cm.yaml) - -___ -### Script output -`cmr "import mlperf training mlperf-training experiment 2experiment to-experiment " [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/index.md b/docs/MLPerf-benchmark-support/index.md new file mode 100644 index 000000000..334504588 --- /dev/null +++ b/docs/MLPerf-benchmark-support/index.md @@ -0,0 +1,44 @@ +The MLPerf benchmark support category contains the following scripts: + +- [add-custom-nvidia-system](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/add-custom-nvidia-system/README.md) +- [benchmark-any-mlperf-inference-implementation](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/benchmark-any-mlperf-inference-implementation/README.md) +- [build-mlperf-inference-server-nvidia](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/build-mlperf-inference-server-nvidia/README.md) +- [generate-mlperf-inference-submission](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/generate-mlperf-inference-submission/README.md) +- [generate-mlperf-inference-user-conf](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/generate-mlperf-inference-user-conf/README.md) +- [generate-mlperf-tiny-report](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/generate-mlperf-tiny-report/README.md) +- [generate-mlperf-tiny-submission](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/generate-mlperf-tiny-submission/README.md) +- [generate-nvidia-engine](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/generate-nvidia-engine/README.md) +- 
[get-mlperf-inference-intel-scratch-space](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-mlperf-inference-intel-scratch-space/README.md) +- [get-mlperf-inference-loadgen](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-mlperf-inference-loadgen/README.md) +- [get-mlperf-inference-nvidia-common-code](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-mlperf-inference-nvidia-common-code/README.md) +- [get-mlperf-inference-nvidia-scratch-space](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-mlperf-inference-nvidia-scratch-space/README.md) +- [get-mlperf-inference-results](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-mlperf-inference-results/README.md) +- [get-mlperf-inference-results-dir](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-mlperf-inference-results-dir/README.md) +- [get-mlperf-inference-src](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-mlperf-inference-src/README.md) +- [get-mlperf-inference-submission-dir](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-mlperf-inference-submission-dir/README.md) +- [get-mlperf-inference-sut-configs](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-mlperf-inference-sut-configs/README.md) +- [get-mlperf-inference-sut-description](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-mlperf-inference-sut-description/README.md) +- [get-mlperf-logging](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-mlperf-logging/README.md) +- [get-mlperf-power-dev](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-mlperf-power-dev/README.md) +- [get-mlperf-tiny-eembc-energy-runner-src](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-mlperf-tiny-eembc-energy-runner-src/README.md) +- 
[get-mlperf-tiny-src](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-mlperf-tiny-src/README.md) +- [get-mlperf-training-nvidia-code](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-mlperf-training-nvidia-code/README.md) +- [get-mlperf-training-src](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-mlperf-training-src/README.md) +- [get-nvidia-mitten](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-nvidia-mitten/README.md) +- [get-spec-ptd](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-spec-ptd/README.md) +- [import-mlperf-inference-to-experiment](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/import-mlperf-inference-to-experiment/README.md) +- [import-mlperf-tiny-to-experiment](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/import-mlperf-tiny-to-experiment/README.md) +- [import-mlperf-training-to-experiment](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/import-mlperf-training-to-experiment/README.md) +- [install-mlperf-logging-from-src](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-mlperf-logging-from-src/README.md) +- [prepare-training-data-bert](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/prepare-training-data-bert/README.md) +- [prepare-training-data-resnet](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/prepare-training-data-resnet/README.md) +- [preprocess-mlperf-inference-submission](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/preprocess-mlperf-inference-submission/README.md) +- [process-mlperf-accuracy](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/process-mlperf-accuracy/README.md) +- [push-mlperf-inference-results-to-github](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/push-mlperf-inference-results-to-github/README.md) 
+- [run-all-mlperf-models](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/run-all-mlperf-models/README.md) +- [run-mlperf-inference-mobilenet-models](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/run-mlperf-inference-mobilenet-models/README.md) +- [run-mlperf-inference-submission-checker](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/run-mlperf-inference-submission-checker/README.md) +- [run-mlperf-power-client](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/run-mlperf-power-client/README.md) +- [run-mlperf-power-server](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/run-mlperf-power-server/README.md) +- [run-mlperf-training-submission-checker](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/run-mlperf-training-submission-checker/README.md) +- [truncate-mlperf-inference-accuracy-log](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/truncate-mlperf-inference-accuracy-log/README.md) diff --git a/docs/MLPerf-benchmark-support/install-mlperf-logging-from-src/index.md b/docs/MLPerf-benchmark-support/install-mlperf-logging-from-src/index.md deleted file mode 100644 index 885a883f2..000000000 --- a/docs/MLPerf-benchmark-support/install-mlperf-logging-from-src/index.md +++ /dev/null @@ -1,126 +0,0 @@ -Automatically generated README for this automation recipe: **install-mlperf-logging-from-src** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-mlperf-logging-from-src,f67cb84a5dc942c3) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-mlperf-logging-from-src)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *install,mlperf,logging,from.src* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install mlperf logging from.src" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,mlperf,logging,from.src` - -`cm run script --tags=install,mlperf,logging,from.src ` - -*or* - -`cmr "install mlperf logging from.src"` - -`cmr "install mlperf logging from.src " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'install,mlperf,logging,from.src', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,mlperf,logging,from.src"``` - -#### Run this script via Docker (beta) - -`cm docker script "install mlperf logging from.src" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -* `master` -* `v3.1` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-mlperf-logging-from-src/_cm.yaml)*** - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,git,repo,_repo.https://github.com/mlcommons/logging - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-mlperf-logging-from-src/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-mlperf-logging-from-src/_cm.yaml) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-mlperf-logging-from-src/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-mlperf-logging-from-src/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-mlperf-logging-from-src/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-mlperf-logging-from-src/_cm.yaml) - -___ -### Script output -`cmr "install mlperf logging from.src " -j` -#### New environment keys (filter) - -* `CM_MLPERF_LOGGING_REPO_PATH` -#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/prepare-training-data-bert/index.md b/docs/MLPerf-benchmark-support/prepare-training-data-bert/index.md deleted file mode 100644 index fc0386cbd..000000000 --- a/docs/MLPerf-benchmark-support/prepare-training-data-bert/index.md +++ /dev/null @@ -1,193 +0,0 @@ -Automatically generated README for this automation recipe: **prepare-training-data-bert** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=prepare-training-data-bert,1e06a7abe23545eb) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *prepare,mlperf,training,data,input,bert* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "prepare mlperf training data input bert" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=prepare,mlperf,training,data,input,bert` - -`cm run script --tags=prepare,mlperf,training,data,input,bert[,variations] [--input_flags]` - -*or* - -`cmr "prepare mlperf training data input bert"` - -`cmr "prepare mlperf training data input bert [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'prepare,mlperf,training,data,input,bert', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="prepare,mlperf,training,data,input,bert"``` - -#### Run this script via Docker (beta) - -`cm docker script "prepare mlperf training data input bert[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * Group "**implementation**" -
    - Click here to expand this section. - - * **`_nvidia`** (default) - - Environment variables: - - *CM_TMP_VARIATION*: `nvidia` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,git,repo,_repo.https://github.com/wchen61/training_results_v2.1,_branch.fix_bert_prepare_data - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - * `_reference` - - Environment variables: - - *CM_TMP_VARIATION*: `reference` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,mlperf,training,src - * CM names: `--adr.['mlperf-training-src']...` - - CM script: [get-mlperf-training-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-src) - * get,python3 - * CM names: `--adr.['python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,generic-python-lib,_tensorflow - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_protobuf - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - -
    - - -#### Default variations - -`_nvidia` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--clean=value` → `CM_MLPERF_TRAINING_CLEAN_TFRECORDS=value` -* `--data_dir=value` → `CM_DATA_DIR=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "clean":...}) -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/_cm.json)*** - * download,file,_gdown,_url.https://drive.google.com/uc?id=1fbGClQMi2CoMv7fwrwTC5YYPooQBdcFW - - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) - * download,file,_gdown,_url.https://drive.google.com/uc?id=1USK108J6hMM_d27xCHi738qBL8_BT1u1 - - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) - * download,file,_gdown,_url.https://drive.google.com/uc?id=1tmMgLwoBvbEJEHXh77sqrXYw5RpqT8R_ - - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) - * download-and-extract,file,_gdown,_extract,_url.https://drive.google.com/uc?id=14xV2OUGSQDG_yDBrmbSdcDC-QGeqpfs_ - - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) - * download,file,_gdown,_url.https://drive.google.com/uc?id=1chiTBljF0Eh1U5pKs6ureVHgSbtU8OG_ - - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) - * download,file,_gdown,_url.https://drive.google.com/uc?id=1Q47V3K3jFRkbJ2zGCrKkKk-n0fvMZsa0 - - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) - * download,file,_gdown,_url.https://drive.google.com/uc?id=1vAcVmXSLsLeQ1q7gvHnQUSth5W_f_pwv - - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) - 1. 
***Run native script if exists*** - * [run-nvidia.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/run-nvidia.sh) - * [run-reference.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/run-reference.sh) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-bert/_cm.json) - -___ -### Script output -`cmr "prepare mlperf training data input bert [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_MLPERF_TRAINING_BERT_*` -#### New environment keys auto-detected from customize - -* `CM_MLPERF_TRAINING_BERT_CONFIG_PATH` -* `CM_MLPERF_TRAINING_BERT_DATA_PATH` -* `CM_MLPERF_TRAINING_BERT_TFRECORDS_PATH` -* `CM_MLPERF_TRAINING_BERT_VOCAB_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/prepare-training-data-resnet/index.md b/docs/MLPerf-benchmark-support/prepare-training-data-resnet/index.md deleted file mode 100644 index 1b2907e22..000000000 --- a/docs/MLPerf-benchmark-support/prepare-training-data-resnet/index.md +++ /dev/null @@ -1,206 +0,0 @@ -Automatically generated README for this automation recipe: **prepare-training-data-resnet** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=prepare-training-data-resnet,d42a8a8ca2704f9f) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *prepare,mlperf,training,data,input,resnet* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "prepare mlperf training data input resnet" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=prepare,mlperf,training,data,input,resnet` - -`cm run script --tags=prepare,mlperf,training,data,input,resnet[,variations] [--input_flags]` - -*or* - -`cmr "prepare mlperf training data input resnet"` - -`cmr "prepare mlperf training data input resnet [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'prepare,mlperf,training,data,input,resnet' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="prepare,mlperf,training,data,input,resnet"``` - -#### Run this script via Docker (beta) - -`cm docker script "prepare mlperf training data input resnet[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_mxnet.#` - - Environment variables: - - *CM_MXNET_VERSION*: `#` - - Workflow: - -
    - - - * Group "**implementation**" -
    - Click here to expand this section. - - * **`_nvidia`** (default) - - Environment variables: - - *CM_TMP_VARIATION*: `nvidia` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,mlperf,training,nvidia,code - * CM names: `--adr.['nvidia-training-code']...` - - CM script: [get-mlperf-training-nvidia-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-nvidia-code) - * get,git,repo,_repo.https://github.com/NVIDIA/DeepLearningExamples,_sha.81ee705868a11d6fe18c12d237abe4a08aab5fd6 - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - * `_reference` - - Environment variables: - - *CM_TMP_VARIATION*: `reference` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,mlperf,training,src - * CM names: `--adr.['mlperf-training-src']...` - - CM script: [get-mlperf-training-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-src) - * get,python3 - * CM names: `--adr.['python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,generic-python-lib,_tensorflow - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_protobuf - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - -
    - - -#### Default variations - -`_nvidia` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--data_dir=value` → `CM_DATA_DIR=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "data_dir":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet/_cm.json)*** - * get,dataset,imagenet,train - * CM names: `--adr.['imagenet-train']...` - - CM script: [get-dataset-imagenet-train](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-train) - * get,dataset,imagenet,val,original,_full - * CM names: `--adr.['imagenet-val']...` - - CM script: [get-dataset-imagenet-val](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-val) - * get,generic-sys-util,_rsync - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet/_cm.json)*** - * download,file,_wget,_url.https://raw.githubusercontent.com/tensorflow/models/master/research/slim/datasets/imagenet_2012_validation_synset_labels.txt - - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) - * download,file,_wget,_url.https://raw.githubusercontent.com/tensorflow/tpu/master/tools/datasets/imagenet_to_gcs.py - * Enable this dependency only if all ENV vars are set:
    -`{'CM_TMP_VARIATION': ['reference']}` - - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) - 1. ***Run native script if exists*** - * [run-nvidia.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet/run-nvidia.sh) - * [run-reference.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet/run-reference.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/prepare-training-data-resnet/_cm.json) - -___ -### Script output -`cmr "prepare mlperf training data input resnet [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_MLPERF_TRAINING_NVIDIA_RESNET_PREPROCESSED_PATH` -* `CM_MLPERF_TRAINING_RESNET_*` -#### New environment keys auto-detected from customize - -* `CM_MLPERF_TRAINING_NVIDIA_RESNET_PREPROCESSED_PATH` -* `CM_MLPERF_TRAINING_RESNET_DATA_PATH` -* `CM_MLPERF_TRAINING_RESNET_TFRECORDS_PATH` \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/preprocess-mlperf-inference-submission/index.md b/docs/MLPerf-benchmark-support/preprocess-mlperf-inference-submission/index.md deleted file mode 100644 index 20b71fd73..000000000 --- a/docs/MLPerf-benchmark-support/preprocess-mlperf-inference-submission/index.md +++ /dev/null @@ -1,144 +0,0 @@ -Automatically generated README for this automation recipe: **preprocess-mlperf-inference-submission** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) 
- ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=preprocess-mlperf-inference-submission,c23068394a314266) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/preprocess-mlperf-inference-submission)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *run,mlc,mlcommons,mlperf,inference,submission,mlperf-inference,processor,preprocessor,preprocess* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "run mlc mlcommons mlperf inference submission mlperf-inference processor preprocessor preprocess" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=run,mlc,mlcommons,mlperf,inference,submission,mlperf-inference,processor,preprocessor,preprocess` - -`cm run script --tags=run,mlc,mlcommons,mlperf,inference,submission,mlperf-inference,processor,preprocessor,preprocess [--input_flags]` - -*or* - -`cmr "run mlc mlcommons mlperf inference submission mlperf-inference processor preprocessor preprocess"` - -`cmr "run mlc mlcommons mlperf inference submission mlperf-inference processor preprocessor preprocess " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'run,mlc,mlcommons,mlperf,inference,submission,mlperf-inference,processor,preprocessor,preprocess' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="run,mlc,mlcommons,mlperf,inference,submission,mlperf-inference,processor,preprocessor,preprocess"``` - -#### Run this script via Docker (beta) - -`cm docker script "run mlc mlcommons mlperf inference submission mlperf-inference processor preprocessor preprocess" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--submission_dir=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` -* `--submitter=value` → `CM_MLPERF_SUBMITTER=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "submission_dir":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/preprocess-mlperf-inference-submission/_cm.json)*** - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,mlcommons,inference,src - * CM names: `--adr.['inference-src', 'submission-checker-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * get,mlperf,submission,dir - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_MLPERF_INFERENCE_SUBMISSION_DIR': ['on']}` - * CM names: `--adr.['get-mlperf-submission-dir']...` - - CM script: [get-mlperf-inference-submission-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-submission-dir) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/preprocess-mlperf-inference-submission/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/preprocess-mlperf-inference-submission/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/preprocess-mlperf-inference-submission/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/preprocess-mlperf-inference-submission/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/preprocess-mlperf-inference-submission/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/preprocess-mlperf-inference-submission/_cm.json) - -___ -### Script output -`cmr "run mlc mlcommons mlperf inference submission mlperf-inference processor preprocessor preprocess " [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/process-mlperf-accuracy/index.md b/docs/MLPerf-benchmark-support/process-mlperf-accuracy/index.md deleted file mode 100644 index 47b3f0b5f..000000000 --- a/docs/MLPerf-benchmark-support/process-mlperf-accuracy/index.md +++ /dev/null @@ -1,334 +0,0 @@ -Automatically generated README for this automation recipe: **process-mlperf-accuracy** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=process-mlperf-accuracy,6e809013816b42ea) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *run,mlperf,mlcommons,accuracy,mlc,process,process-accuracy* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "run mlperf mlcommons accuracy mlc process process-accuracy" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=run,mlperf,mlcommons,accuracy,mlc,process,process-accuracy` - -`cm run script --tags=run,mlperf,mlcommons,accuracy,mlc,process,process-accuracy[,variations] [--input_flags]` - -*or* - -`cmr "run mlperf mlcommons accuracy mlc process process-accuracy"` - -`cmr "run mlperf mlcommons accuracy mlc process process-accuracy [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'run,mlperf,mlcommons,accuracy,mlc,process,process-accuracy' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="run,mlperf,mlcommons,accuracy,mlc,process,process-accuracy"``` - -#### Run this script via Docker (beta) - -`cm docker script "run mlperf mlcommons accuracy mlc process process-accuracy[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_default-pycocotools,openimages` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_pycocotools - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,mlcommons,mlperf,inference,src,-_openimages-nvidia-pycocotools - * CM names: `--adr.['for-pycocotools', 'accuracy-check-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * `_nvidia-pycocotools,openimages` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_nvidia-pycocotools - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,mlcommons,mlperf,inference,src,_openimages-nvidia-pycocotools - * CM names: `--adr.['for-pycocotools', 'accuracy-check-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - -
    - - - * Group "**coco-evaluation-tool**" -
    - Click here to expand this section. - - * **`_default-pycocotools`** (default) - - Workflow: - * `_nvidia-pycocotools` - - Workflow: - -
    - - - * Group "**dataset**" -
    - Click here to expand this section. - - * `_cnndm` - - Environment variables: - - *CM_DATASET*: `cnndm` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,dataset,cnndm,_validation - - CM script: [get-dataset-cnndm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-cnndm) - * get,generic-python-lib,_package.rouge_score - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.nltk - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.evaluate - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.absl-py - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.rouge_score - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_coco2014` - - Environment variables: - - *CM_DATASET*: `coco2014` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,dataset,coco2014,original - * CM names: `--adr.['coco2014-dataset', 'coco2014-original']...` - - CM script: [get-dataset-coco2014](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-coco2014) - * **`_imagenet`** (default) - - Environment variables: - - *CM_DATASET*: `imagenet` - - Workflow: - 1. 
***Read "deps" on other CM scripts*** - * get,dataset-aux,image-classification,imagenet-aux - - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) - * get,generic-python-lib,_numpy - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_kits19` - - Environment variables: - - *CM_DATASET*: `kits19` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,dataset,preprocessed,medical-imaging,kits19 - - CM script: [get-preprocessed-dataset-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-kits19) - * `_librispeech` - - Environment variables: - - *CM_DATASET*: `librispeech` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,dataset,preprocessed,speech-recognition,librispeech - - CM script: [get-preprocessed-dataset-librispeech](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-librispeech) - * `_open-orca` - - Environment variables: - - *CM_DATASET*: `openorca` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,dataset,openorca,preprocessed - * CM names: `--adr.['openorca-dataset']...` - - CM script: [get-preprocessed-dataset-openorca](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openorca) - * get,ml-model,llama2 - * CM names: `--adr.['llama2-model']...` - - CM script: [get-ml-model-llama2](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-llama2) - * `_openimages` - - Environment variables: - - *CM_DATASET*: `openimages` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,dataset-aux,openimages,annotations - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_RUN_STYLE': ['valid']}` - - CM script: [get-dataset-openimages-annotations](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages-annotations) - * get,dataset,openimages,original - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_MLPERF_RUN_STYLE': ['valid']}` - * CM names: `--adr.['openimages-original']...` - - CM script: [get-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages) - * get,generic-python-lib,_package.kiwisolver - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_squad` - - Environment variables: - - *CM_DATASET*: `squad` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_boto3 - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.transformers - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,dataset,squad,language-processing - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_DATASET_SQUAD_VAL_PATH': []}` - - CM script: [get-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad) - * get,dataset-aux,squad-vocab - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_ML_MODEL_BERT_VOCAB_FILE_WITH_PATH': ['on']}` - - CM script: [get-dataset-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad-vocab) - * get,generic-python-lib,_torch - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_tokenization - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_terabyte` - - Environment variables: - - *CM_DATASET*: `squad` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_ujson - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_scikit-learn - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_numpy - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - -
    - - - * Group "**precision**" -
    - Click here to expand this section. - - * `_float16` - - Environment variables: - - *CM_ACCURACY_DTYPE*: `float16` - - Workflow: - * **`_float32`** (default) - - Environment variables: - - *CM_ACCURACY_DTYPE*: `float32` - - Workflow: - * `_float64` - - Environment variables: - - *CM_ACCURACY_DTYPE*: `float64` - - Workflow: - * `_int16` - - Environment variables: - - *CM_ACCURACY_DTYPE*: `int16` - - Workflow: - * `_int32` - - Environment variables: - - *CM_ACCURACY_DTYPE*: `int32` - - Workflow: - * `_int64` - - Environment variables: - - *CM_ACCURACY_DTYPE*: `int64` - - Workflow: - * `_int8` - - Environment variables: - - *CM_ACCURACY_DTYPE*: `int8` - - Workflow: - -
    - - -#### Default variations - -`_default-pycocotools,_float32,_imagenet` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--result_dir=value` → `CM_MLPERF_ACCURACY_RESULTS_DIR=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "result_dir":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy/_cm.json)*** - * get,python3 - * CM names: `--adr.['python3', 'python']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,mlcommons,inference,src - * CM names: `--adr.['inference-src', 'accuracy-check-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-mlperf-accuracy/_cm.json) - -___ -### Script output -`cmr "run mlperf mlcommons accuracy mlc process process-accuracy [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/push-mlperf-inference-results-to-github/index.md b/docs/MLPerf-benchmark-support/push-mlperf-inference-results-to-github/index.md deleted file mode 100644 index ebca5bece..000000000 --- a/docs/MLPerf-benchmark-support/push-mlperf-inference-results-to-github/index.md +++ /dev/null @@ -1,150 +0,0 @@ -Automatically generated README for this automation recipe: **push-mlperf-inference-results-to-github** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=push-mlperf-inference-results-to-github,36c2ffd5df5d453a) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-mlperf-inference-results-to-github)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *push,mlperf,mlperf-inference-results,publish-results,inference,submission,github* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "push mlperf mlperf-inference-results publish-results inference submission github" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=push,mlperf,mlperf-inference-results,publish-results,inference,submission,github` - -`cm run script --tags=push,mlperf,mlperf-inference-results,publish-results,inference,submission,github [--input_flags]` - -*or* - -`cmr "push mlperf mlperf-inference-results publish-results inference submission github"` - -`cmr "push mlperf mlperf-inference-results publish-results inference submission github " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'push,mlperf,mlperf-inference-results,publish-results,inference,submission,github' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="push,mlperf,mlperf-inference-results,publish-results,inference,submission,github"``` - -#### Run this script via Docker (beta) - -`cm docker script "push mlperf mlperf-inference-results publish-results inference submission github" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--branch=value` → `CM_GIT_BRANCH=value` -* `--commit_message=value` → `CM_MLPERF_RESULTS_REPO_COMMIT_MESSAGE=value` -* `--repo_branch=value` → `CM_GIT_BRANCH=value` -* `--repo_url=value` → `CM_MLPERF_RESULTS_GIT_REPO_URL=value` -* `--submission_dir=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "branch":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_MLPERF_RESULTS_GIT_REPO_URL: `https://github.com/ctuning/mlperf_inference_submissions_v4.0` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-mlperf-inference-results-to-github/_cm.json)*** - * get,python3 - * CM names: `--adr.['python3', 'python']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,generic-sys-util,_rsync - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,mlperf,submission,dir - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_MLPERF_INFERENCE_SUBMISSION_DIR': ['on']}` - * CM names: `--adr.['get-mlperf-submission-dir']...` - - CM script: [get-mlperf-inference-submission-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-submission-dir) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-mlperf-inference-results-to-github/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-mlperf-inference-results-to-github/_cm.json)*** - * get,git,repo - * CM names: `--adr.['get-git-repo']...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-mlperf-inference-results-to-github/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-mlperf-inference-results-to-github/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-mlperf-inference-results-to-github/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/push-mlperf-inference-results-to-github/_cm.json) - -___ -### Script output -`cmr "push mlperf mlperf-inference-results publish-results inference submission github " [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/run-all-mlperf-models/index.md b/docs/MLPerf-benchmark-support/run-all-mlperf-models/index.md deleted file mode 100644 index 01f5427b1..000000000 --- a/docs/MLPerf-benchmark-support/run-all-mlperf-models/index.md +++ /dev/null @@ -1,237 +0,0 @@ -
    -Click here to see the table of contents. - -* [About](#about) -* [Summary](#summary) -* [Reuse this script in your project](#reuse-this-script-in-your-project) - * [ Install CM automation language](#install-cm-automation-language) - * [ Check CM script flags](#check-cm-script-flags) - * [ Run this script from command line](#run-this-script-from-command-line) - * [ Run this script from Python](#run-this-script-from-python) - * [ Run this script via GUI](#run-this-script-via-gui) - * [ Run this script via Docker (beta)](#run-this-script-via-docker-(beta)) -* [Customization](#customization) - * [ Variations](#variations) - * [ Default environment](#default-environment) -* [Script workflow, dependencies and native scripts](#script-workflow-dependencies-and-native-scripts) -* [Script output](#script-output) -* [New environment keys (filter)](#new-environment-keys-(filter)) -* [New environment keys auto-detected from customize](#new-environment-keys-auto-detected-from-customize) -* [Maintainers](#maintainers) - -
    - -*Note that this README is automatically generated - don't edit!* - -### About - -#### Summary - -* Category: *MLPerf benchmark support.* -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* CM "database" tags to find this script: *run,natively,all,mlperf-models* -* Output cached? *False* -___ -### Reuse this script in your project - -#### Install CM automation language - -* [Installation guide](https://github.com/mlcommons/ck/blob/master/docs/installation.md) -* [CM intro](https://doi.org/10.5281/zenodo.8105339) - -#### Pull CM repository with this automation - -```cm pull repo mlcommons@cm4mlops --checkout=dev``` - - -#### Run this script from command line - -1. `cm run script --tags=run,natively,all,mlperf-models[,variations] ` - -2. `cmr "run natively all mlperf-models[ variations]" ` - -* `variations` can be seen [here](#variations) - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'run,natively,all,mlperf-models', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="run,natively,all,mlperf-models"``` - -Use this [online GUI](https://cKnowledge.org/cm-gui/?tags=run,natively,all,mlperf-models) to generate CM CMD. - -#### Run this script via Docker (beta) - -`cm docker script "run natively all mlperf-models[ variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_phoenix,reference` - - Workflow: - -
    - - - * Group "**implementation**" -
    - Click here to expand this section. - - * `_deepsparse` - - Environment variables: - - *DIVISION*: `open` - - *IMPLEMENTATION*: `deepsparse` - - Workflow: - * `_intel` - - Environment variables: - - *IMPLEMENTATION*: `intel` - - Workflow: - * `_mil` - - Environment variables: - - *IMPLEMENTATION*: `mil` - - Workflow: - * `_nvidia` - - Environment variables: - - *IMPLEMENTATION*: `nvidia` - - Workflow: - * `_qualcomm` - - Environment variables: - - *IMPLEMENTATION*: `qualcomm` - - Workflow: - * `_reference` - - Environment variables: - - *IMPLEMENTATION*: `reference` - - Workflow: - * `_tflite-cpp` - - Environment variables: - - *IMPLEMENTATION*: `tflite_cpp` - - Workflow: - -
    - - - * Group "**power**" -
    - Click here to expand this section. - - * **`_performance-only`** (default) - - Workflow: - * `_power` - - Environment variables: - - *POWER*: `True` - - Workflow: - -
    - - - * Group "**sut**" -
    - Click here to expand this section. - - * `_macbookpro-m1` - - Environment variables: - - *CATEGORY*: `edge` - - *DIVISION*: `closed` - - Workflow: - * `_orin.32g` - - Environment variables: - - *CATEGORY*: `edge` - - *DIVISION*: `closed` - - Workflow: - * `_phoenix` - - Environment variables: - - *CATEGORY*: `edge,datacenter` - - *DIVISION*: `closed` - - Workflow: - * `_sapphire-rapids.24c` - - Environment variables: - - *CATEGORY*: `edge,datacenter` - - *DIVISION*: `closed` - - Workflow: - -
    - - -#### Default variations - -`_performance-only` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Script workflow, dependencies and native scripts - -
    -Click here to expand this section. - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/_cm.yaml) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/_cm.yaml) - 1. ***Run native script if exists*** - * [run-bert-macos.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-bert-macos.sh) - * [run-bert.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-bert.sh) - * [run-cpp-implementation.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-cpp-implementation.sh) - * [run-mobilenet-models.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-mobilenet-models.sh) - * [run-nvidia-4090.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-nvidia-4090.sh) - * [run-nvidia-a100.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-nvidia-a100.sh) - * [run-nvidia-t4.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-nvidia-t4.sh) - * [run-pruned-bert.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-pruned-bert.sh) - * [run-reference-models.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-reference-models.sh) - * [run-resnet50-macos.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-resnet50-macos.sh) - * [run-resnet50.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/run-resnet50.sh) - 1. 
Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/_cm.yaml) - 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/main/script/run-all-mlperf-models/_cm.yaml) -
    - -___ -### Script output -`cmr "run natively all mlperf-models[,variations]" -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize - -___ -### Maintainers - -* [Open MLCommons taskforce on automation and reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) \ No newline at end of file diff --git a/docs/MLPerf-benchmark-support/run-mlperf-inference-mobilenet-models/index.md b/docs/MLPerf-benchmark-support/run-mlperf-inference-mobilenet-models/index.md deleted file mode 100644 index a72c5e798..000000000 --- a/docs/MLPerf-benchmark-support/run-mlperf-inference-mobilenet-models/index.md +++ /dev/null @@ -1,383 +0,0 @@ -Automatically generated README for this automation recipe: **run-mlperf-inference-mobilenet-models** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-mlperf-inference-mobilenet-models,f21cc993a8b14a58) ]* - ---- - -## Set up - -We need to get imagenet full dataset to make image-classification submissions for MLPerf inference. Since this dataset is not publicly available via a URL please follow the instructions given [here](https://github.com/mlcommons/ck/blob/master/cm-mlops/script/get-dataset-imagenet-val/README-extra.md) to download the dataset and register in CM. - -
    -Click here to set up docker (Optional). - -### Docker Setup - -CM commands are expected to run natively but if you prefer not to modify the host system, you can do the below command to set up a docker container. - -``` -cm docker script --tags=run,mobilenet-models,_tflite,_accuracy-only \ ---adr.compiler.tags=gcc \ ---docker_cm_repo=mlcommons@cm4mlops \ ---imagenet_path=$HOME/imagenet-2012-val \ ---results_dir=$HOME/mobilenet_results \ ---submission_dir=$HOME/inference_submission_3.1 \ ---docker_skip_run_cmd -``` - -This command will build a docker container and give you an interactive shell from which you can execute the below CM run commands. -* `results_dir`, `submission_dir` and `imagenet_path` are mounted from the host system. -* `results_dir` and `submission_dir` are expected to be empty directories to be populated by the docker -* `imagenet_path` should point to the imagenet folder containing the 50000 validation images. - -
    - -## Run Commands - -Since the runs can take many hours, in case you are running remotely you can install screen as follows. You may omit "screen" from all commands if you are running on a host system. -``` -cmr "get generic-sys-util _screen" -``` -### Default tflite - - -#### Do a full accuracy run for all the models (can take almost a day) - -``` -screen cmr "run mobilenet-models _tflite _accuracy-only" \ ---adr.compiler.tags=gcc \ ---results_dir=$HOME/mobilenet_results -``` - -#### Do a full performance run for all the models (can take almost a day) -``` -screen cmr "run mobilenet-models _tflite _performance-only" \ ---adr.compiler.tags=gcc \ ---results_dir=$HOME/mobilenet_results -``` - -#### Generate README files for all the runs -``` -cmr "run mobilenet-models _tflite _populate-readme" \ ---adr.compiler.tags=gcc \ ---results_dir=$HOME/mobilenet_results -``` - -#### Generate actual submission tree - -We should use the master branch of MLCommons inference repo for the submission checker. You can use `--hw_note_extra` option to add your name to the notes. -``` -cmr "generate inference submission" \ ---results_dir=$HOME/mobilenet_results/valid_results \ ---submission_dir=$HOME/mobilenet_submission_tree \ ---clean \ ---infer_scenario_results=yes \ ---adr.compiler.tags=gcc --adr.inference-src.version=master \ ---run-checker \ ---submitter=cTuning \ ---hw_notes_extra="Result taken by NAME" -``` -* Use `--hw_name="My system name"` to give a meaningful system name. Examples can be seen [here](https://github.com/mlcommons/inference_results_v3.0/tree/main/open/cTuning/systems) - -#### Push the results to GitHub repo - -First, create a fork of [this repo](https://github.com/ctuning/mlperf_inference_submissions_v3.1/). Then run the following command after replacing `--repo_url` with your fork URL. 
-``` -cmr "push github mlperf inference submission" \ ---submission_dir=$HOME/mobilenet_submission_tree \ ---repo_url=https://github.com/ctuning/mlperf_inference_submissions_v3.1/ \ ---commit_message="Mobilenet results added" -``` - -Create a PR to [cTuning repo](https://github.com/ctuning/mlperf_inference_submissions_v3.1/) - -### Using ARMNN with NEON - -Follow the same procedure as above but for the first three experiment runs add `_armnn,_neon` to the tags. For example -``` -cmr "run mobilenet-models _tflite _armnn _neon _accuracy-only" \ ---adr.compiler.tags=gcc \ ---results_dir=$HOME/mobilenet_results -``` - -`results_dir` and `submission_dir` can be the same as before as results will be going to different subfolders. - -### Using ARMNN with OpenCL -Follow the same procedure as above but for the first three experiment runs add `_armnn,_opencl` to the tags. For example -``` -cmr "run mobilenet-models _tflite _armnn _opencl _accuracy-only" \ ---adr.compiler.tags=gcc \ ---results_dir=$HOME/mobilenet_results -``` - -`results_dir` and `submission_dir` can be the same as before as results will be going to different subfolders. - - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-mobilenet-models)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *run,mobilenet,models,image-classification,mobilenet-models,mlperf,inference* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "run mobilenet models image-classification mobilenet-models mlperf inference" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=run,mobilenet,models,image-classification,mobilenet-models,mlperf,inference` - -`cm run script --tags=run,mobilenet,models,image-classification,mobilenet-models,mlperf,inference[,variations] [--input_flags]` - -*or* - -`cmr "run mobilenet models image-classification mobilenet-models mlperf inference"` - -`cmr "run mobilenet models image-classification mobilenet-models mlperf inference [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'run,mobilenet,models,image-classification,mobilenet-models,mlperf,inference', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="run,mobilenet,models,image-classification,mobilenet-models,mlperf,inference"``` - -#### Run this script via Docker (beta) - -`cm docker script "run mobilenet models image-classification mobilenet-models mlperf inference[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_armnn` - - Environment variables: - - *CM_MLPERF_USE_ARMNN_LIBRARY*: `yes` - - Workflow: - * `_neon` - - Aliases: `_use-neon` - - Environment variables: - - *CM_MLPERF_USE_NEON*: `yes` - - Workflow: - * `_only-fp32` - - Environment variables: - - *CM_MLPERF_RUN_INT8*: `no` - - Workflow: - * `_only-int8` - - Environment variables: - - *CM_MLPERF_RUN_FP32*: `no` - - Workflow: - * `_opencl` - - Environment variables: - - *CM_MLPERF_USE_OPENCL*: `yes` - - Workflow: - * `_tflite,armnn` - - Environment variables: - - *CM_MLPERF_TFLITE_ARMNN*: `yes` - - Workflow: - * `_tflite,armnn,neon` - - Environment variables: - - *CM_MLPERF_TFLITE_ARMNN_NEON*: `yes` - - Workflow: - * `_tflite,armnn,opencl` - - Environment variables: - - *CM_MLPERF_TFLITE_ARMNN_OPENCL*: `yes` - - Workflow: - -
    - - - * Group "**base-framework**" -
    - Click here to expand this section. - - * **`_tflite`** (default) - - Workflow: - -
    - - - * Group "**model-selection**" -
    - Click here to expand this section. - - * **`_all-models`** (default) - - Environment variables: - - *CM_MLPERF_RUN_MOBILENETS*: `yes` - - *CM_MLPERF_RUN_EFFICIENTNETS*: `yes` - - Workflow: - * `_efficientnet` - - Environment variables: - - *CM_MLPERF_RUN_EFFICIENTNETS*: `yes` - - Workflow: - * `_mobilenet` - - Environment variables: - - *CM_MLPERF_RUN_MOBILENETS*: `yes` - - Workflow: - -
    - - - * Group "**optimization**" -
    - Click here to expand this section. - - * **`_tflite-default`** (default) - - Environment variables: - - *CM_MLPERF_TFLITE_DEFAULT_MODE*: `yes` - - Workflow: - -
    - - - * Group "**run-mode**" -
    - Click here to expand this section. - - * `_accuracy-only` - - Environment variables: - - *CM_MLPERF_FIND_PERFORMANCE_MODE*: `no` - - *CM_MLPERF_ACCURACY_MODE*: `yes` - - *CM_MLPERF_SUBMISSION_MODE*: `no` - - Workflow: - * `_find-performance` - - Environment variables: - - *CM_MLPERF_FIND_PERFORMANCE_MODE*: `yes` - - *CM_MLPERF_SUBMISSION_MODE*: `no` - - Workflow: - * `_performance-only` - - Environment variables: - - *CM_MLPERF_FIND_PERFORMANCE_MODE*: `no` - - *CM_MLPERF_PERFORMANCE_MODE*: `yes` - - *CM_MLPERF_SUBMISSION_MODE*: `no` - - Workflow: - * `_populate-readme` - - Environment variables: - - *CM_MLPERF_FIND_PERFORMANCE_MODE*: `no` - - *CM_MLPERF_POPULATE_README*: `yes` - - Workflow: - * `_submission` - - Environment variables: - - *CM_MLPERF_FIND_PERFORMANCE_MODE*: `no` - - *CM_MLPERF_SUBMISSION_MODE*: `yes` - - Workflow: - -
    - - -#### Default variations - -`_all-models,_tflite,_tflite-default` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--find-performance=value` → `CM_MLPERF_FIND_PERFORMANCE_MODE=value` -* `--imagenet_path=value` → `IMAGENET_PATH=value` -* `--no-rerun=value` → `CM_MLPERF_NO_RERUN=value` -* `--power=value` → `CM_MLPERF_POWER=value` -* `--results_dir=value` → `CM_MLPERF_INFERENCE_RESULTS_DIR=value` -* `--submission=value` → `CM_MLPERF_SUBMISSION_MODE=value` -* `--submission_dir=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "find-performance":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_MLPERF_RUN_MOBILENETS: `no` -* CM_MLPERF_RUN_EFFICIENTNETS: `no` -* CM_MLPERF_NO_RERUN: `no` -* CM_MLPERF_RUN_FP32: `yes` -* CM_MLPERF_RUN_INT8: `yes` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-mobilenet-models/_cm.json)*** - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-mobilenet-models/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-mobilenet-models/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-mobilenet-models/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-mobilenet-models/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-mobilenet-models/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-mobilenet-models/_cm.json) - -___ -### Script output -`cmr "run mobilenet models image-classification mobilenet-models mlperf inference [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/run-mlperf-inference-submission-checker/index.md b/docs/MLPerf-benchmark-support/run-mlperf-inference-submission-checker/index.md deleted file mode 100644 index a530b154e..000000000 --- a/docs/MLPerf-benchmark-support/run-mlperf-inference-submission-checker/index.md +++ /dev/null @@ -1,199 +0,0 @@ -Automatically generated README for this automation recipe: **run-mlperf-inference-submission-checker** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-mlperf-inference-submission-checker,15d03ec2c1af4297) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *run,mlc,mlcommons,mlperf,inference,mlperf-inference,submission,checker,submission-checker,mlc-submission-checker* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "run mlc mlcommons mlperf inference mlperf-inference submission checker submission-checker mlc-submission-checker" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=run,mlc,mlcommons,mlperf,inference,mlperf-inference,submission,checker,submission-checker,mlc-submission-checker` - -`cm run script --tags=run,mlc,mlcommons,mlperf,inference,mlperf-inference,submission,checker,submission-checker,mlc-submission-checker[,variations] [--input_flags]` - -*or* - -`cmr "run mlc mlcommons mlperf inference mlperf-inference submission checker submission-checker mlc-submission-checker"` - -`cmr "run mlc mlcommons mlperf inference mlperf-inference submission checker submission-checker mlc-submission-checker [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'run,mlc,mlcommons,mlperf,inference,mlperf-inference,submission,checker,submission-checker,mlc-submission-checker', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="run,mlc,mlcommons,mlperf,inference,mlperf-inference,submission,checker,submission-checker,mlc-submission-checker"``` - -#### Run this script via Docker (beta) - -`cm docker script "run mlc mlcommons mlperf inference mlperf-inference submission checker submission-checker mlc-submission-checker[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_short-run` - - Environment variables: - - *CM_MLPERF_SHORT_RUN*: `yes` - - Workflow: - -
    - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--extra_args=value` → `CM_MLPERF_SUBMISSION_CHECKER_EXTRA_ARGS=value` -* `--extra_model_benchmark_map=value` → `CM_MLPERF_EXTRA_MODEL_MAPPING=value` -* `--input=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` -* `--power=value` → `CM_MLPERF_POWER=value` -* `--push_to_github=value` → `CM_MLPERF_RESULT_PUSH_TO_GITHUB=value` -* `--skip_compliance=value` → `CM_MLPERF_SKIP_COMPLIANCE=value` -* `--skip_power_check=value` → `CM_MLPERF_SKIP_POWER_CHECK=value` -* `--src_version=value` → `CM_MLPERF_SUBMISSION_CHECKER_VERSION=value` -* `--submission_dir=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` -* `--submitter=value` → `CM_MLPERF_SUBMITTER=value` -* `--tar=value` → `CM_TAR_SUBMISSION_DIR=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "extra_args":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_MLPERF_SHORT_RUN: `no` - -
    - -#### Versions -Default version: `master` - -* `master` -* `r3.0` -* `r3.1` -* `r4.0` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker/_cm.json)*** - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,mlcommons,inference,src - * CM names: `--adr.['inference-src', 'submission-checker-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * get,generic-python-lib,_xlsxwriter - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.pyarrow - * CM names: `--adr.['pyarrow']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_pandas - * CM names: `--adr.['pandas']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,mlperf,submission,dir - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_MLPERF_INFERENCE_SUBMISSION_DIR': ['on']}` - * CM names: `--adr.['get-mlperf-submission-dir']...` - - CM script: [get-mlperf-inference-submission-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-submission-dir) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker/customize.py)*** - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-submission-checker/_cm.json)*** - * publish-results,dashboard - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_DASHBOARD': ['on']}` - - CM script: [publish-results-to-dashboard](https://github.com/mlcommons/cm4mlops/tree/master/script/publish-results-to-dashboard) - * publish-results,github - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_RESULT_PUSH_TO_GITHUB': ['on']}` - * CM names: `--adr.['push-to-github']...` - - CM script: [push-mlperf-inference-results-to-github](https://github.com/mlcommons/cm4mlops/tree/master/script/push-mlperf-inference-results-to-github) - * run,tar - * Enable this dependency only if all ENV vars are set:
    -`{'CM_TAR_SUBMISSION_DIR': ['yes']}` - - CM script: [tar-my-folder](https://github.com/mlcommons/cm4mlops/tree/master/script/tar-my-folder) - -___ -### Script output -`cmr "run mlc mlcommons mlperf inference mlperf-inference submission checker submission-checker mlc-submission-checker [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/run-mlperf-power-client/index.md b/docs/MLPerf-benchmark-support/run-mlperf-power-client/index.md deleted file mode 100644 index d0892f842..000000000 --- a/docs/MLPerf-benchmark-support/run-mlperf-power-client/index.md +++ /dev/null @@ -1,154 +0,0 @@ -Automatically generated README for this automation recipe: **run-mlperf-power-client** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-mlperf-power-client,bf6a6d0cc97b48ae) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-client)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *run,mlc,mlcommons,mlperf,power,client,power-client* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "run mlc mlcommons mlperf power client power-client" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=run,mlc,mlcommons,mlperf,power,client,power-client` - -`cm run script --tags=run,mlc,mlcommons,mlperf,power,client,power-client [--input_flags]` - -*or* - -`cmr "run mlc mlcommons mlperf power client power-client"` - -`cmr "run mlc mlcommons mlperf power client power-client " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'run,mlc,mlcommons,mlperf,power,client,power-client' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="run,mlc,mlcommons,mlperf,power,client,power-client"``` - -#### Run this script via Docker (beta) - -`cm docker script "run mlc mlcommons mlperf power client power-client" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--loadgen_logs_dir=value` → `CM_MLPERF_LOADGEN_LOGS_DIR=value` -* `--log_dir=value` → `CM_MLPERF_POWER_LOG_DIR=value` -* `--max_amps=value` → `CM_MLPERF_POWER_MAX_AMPS=value` -* `--max_volts=value` → `CM_MLPERF_POWER_MAX_VOLTS=value` -* `--ntp_server=value` → `CM_MLPERF_POWER_NTP_SERVER=value` -* `--port=value` → `CM_MLPERF_POWER_SERVER_PORT=value` -* `--power_server=value` → `CM_MLPERF_POWER_SERVER_ADDRESS=value` -* `--run_cmd=value` → `CM_MLPERF_RUN_CMD=value` -* `--server=value` → `CM_MLPERF_POWER_SERVER_ADDRESS=value` -* `--server_port=value` → `CM_MLPERF_POWER_SERVER_PORT=value` -* `--timestamp=value` → `CM_MLPERF_POWER_TIMESTAMP=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "loadgen_logs_dir":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_MLPERF_POWER_LOG_DIR: `logs` -* CM_MLPERF_RUN_CMD: `` -* CM_MLPERF_POWER_SERVER_ADDRESS: `localhost` -* CM_MLPERF_POWER_NTP_SERVER: `time.google.com` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-client/_cm.json)*** - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,mlperf,power,src - * CM names: `--adr.['power-src']...` - - CM script: [get-mlperf-power-dev](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-power-dev) - * get,generic-sys-util,_ntpdate - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-client/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-client/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-client/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-client/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-client/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-client/_cm.json) - -___ -### Script output -`cmr "run mlc mlcommons mlperf power client power-client " [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/run-mlperf-power-server/index.md b/docs/MLPerf-benchmark-support/run-mlperf-power-server/index.md deleted file mode 100644 index 99e37b374..000000000 --- a/docs/MLPerf-benchmark-support/run-mlperf-power-server/index.md +++ /dev/null @@ -1,165 +0,0 @@ -Automatically generated README for this automation recipe: **run-mlperf-power-server** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-mlperf-power-server,5bc68aaf389a40bd) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *run,mlc,mlcommons,mlperf,power,server,power-server* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "run mlc mlcommons mlperf power server power-server" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=run,mlc,mlcommons,mlperf,power,server,power-server` - -`cm run script --tags=run,mlc,mlcommons,mlperf,power,server,power-server [--input_flags]` - -*or* - -`cmr "run mlc mlcommons mlperf power server power-server"` - -`cmr "run mlc mlcommons mlperf power server power-server " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'run,mlc,mlcommons,mlperf,power,server,power-server' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="run,mlc,mlcommons,mlperf,power,server,power-server"``` - -#### Run this script via Docker (beta) - -`cm docker script "run mlc mlcommons mlperf power server power-server" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--device_port=value` → `CM_MLPERF_POWER_DEVICE_PORT=value` -* `--device_type=value` → `CM_MLPERF_POWER_DEVICE_TYPE=value` -* `--interface_flag=value` → `CM_MLPERF_POWER_INTERFACE_FLAG=value` -* `--ntp_server=value` → `CM_MLPERF_POWER_NTP_SERVER=value` -* `--screen=value` → `CM_MLPERF_POWER_SERVER_USE_SCREEN=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "device_port":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_MLPERF_POWER_NTP_SERVER: `time.google.com` -* CM_MLPERF_POWER_INTERFACE_FLAG: `` -* CM_MLPERF_POWER_DEVICE_TYPE: `49` -* CM_MLPERF_POWER_SERVER_ADDRESS: `0.0.0.0` -* CM_MLPERF_POWER_SERVER_PORT: `4950` -* CM_MLPERF_POWER_DEVICE_PORT: `/dev/usbtmc0` -* CM_MLPERF_POWER_SERVER_USE_SCREEN: `no` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server/_cm.json)*** - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,mlperf,power,src - * CM names: `--adr.['power-src']...` - - CM script: [get-mlperf-power-dev](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-power-dev) - * get,mlperf,power,daemon - * CM names: `--adr.['power-damenon']...` - - CM script: [get-spec-ptd](https://github.com/mlcommons/cm4mlops/tree/master/script/get-spec-ptd) - * get,generic,sys-util,_screen - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_HOST_OS_TYPE': 'windows'}` - * CM names: `--adr.['screen']...` - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,generic-python-lib,_package.pypiwin32 - * Enable this dependency only if all ENV vars are set:
    -`{'CM_HOST_OS_TYPE': 'windows'}` - * CM names: `--adr.['win32']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-power-server/_cm.json) - -___ -### Script output -`cmr "run mlc mlcommons mlperf power server power-server " [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/run-mlperf-training-submission-checker/index.md b/docs/MLPerf-benchmark-support/run-mlperf-training-submission-checker/index.md deleted file mode 100644 index 10f093c5f..000000000 --- a/docs/MLPerf-benchmark-support/run-mlperf-training-submission-checker/index.md +++ /dev/null @@ -1,181 +0,0 @@ -Automatically generated README for this automation recipe: **run-mlperf-training-submission-checker** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-mlperf-training-submission-checker,cb5cb60ac9a74d09) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-training-submission-checker)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *run,mlc,mlcommons,mlperf,training,train,mlperf-training,submission,checker,submission-checker,mlc-submission-checker* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "run mlc mlcommons mlperf training train mlperf-training submission checker submission-checker mlc-submission-checker" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=run,mlc,mlcommons,mlperf,training,train,mlperf-training,submission,checker,submission-checker,mlc-submission-checker` - -`cm run script --tags=run,mlc,mlcommons,mlperf,training,train,mlperf-training,submission,checker,submission-checker,mlc-submission-checker[,variations] [--input_flags]` - -*or* - -`cmr "run mlc mlcommons mlperf training train mlperf-training submission checker submission-checker mlc-submission-checker"` - -`cmr "run mlc mlcommons mlperf training train mlperf-training submission checker submission-checker mlc-submission-checker [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'run,mlc,mlcommons,mlperf,training,train,mlperf-training,submission,checker,submission-checker,mlc-submission-checker' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="run,mlc,mlcommons,mlperf,training,train,mlperf-training,submission,checker,submission-checker,mlc-submission-checker"``` - -#### Run this script via Docker (beta) - -`cm docker script "run mlc mlcommons mlperf training train mlperf-training submission checker submission-checker mlc-submission-checker[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_short-run` - - Environment variables: - - *CM_MLPERF_SHORT_RUN*: `yes` - - Workflow: - -
    - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--extra_args=value` → `CM_MLPERF_SUBMISSION_CHECKER_EXTRA_ARGS=value` -* `--input=value` → `CM_MLPERF_SUBMISSION_DIR=value` -* `--power=value` → `CM_MLPERF_POWER=value` -* `--push_to_github=value` → `CM_MLPERF_RESULT_PUSH_TO_GITHUB=value` -* `--skip_compliance=value` → `CM_MLPERF_SKIP_COMPLIANCE=value` -* `--skip_power_check=value` → `CM_MLPERF_SKIP_POWER_CHECK=value` -* `--src_version=value` → `CM_MLPERF_SUBMISSION_CHECKER_VERSION=value` -* `--submission_dir=value` → `CM_MLPERF_SUBMISSION_DIR=value` -* `--submitter=value` → `CM_MLPERF_SUBMITTER=value` -* `--tar=value` → `CM_TAR_SUBMISSION_DIR=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "extra_args":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_MLPERF_SHORT_RUN: `no` - -
    - -#### Versions -Default version: `master` - -* `master` -* `r3.0` -* `r3.1` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-training-submission-checker/_cm.json)*** - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,mlcommons,inference,src - * CM names: `--adr.['inference-src', 'submission-checker-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * install,mlperf,logging,from.src - - CM script: [install-mlperf-logging-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-mlperf-logging-from-src) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-training-submission-checker/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-training-submission-checker/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-training-submission-checker/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-training-submission-checker/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-training-submission-checker/customize.py)*** - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-training-submission-checker/_cm.json)*** - * publish-results,github - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_RESULT_PUSH_TO_GITHUB': ['on']}` - * CM names: `--adr.['push-to-github']...` - - CM script: [push-mlperf-inference-results-to-github](https://github.com/mlcommons/cm4mlops/tree/master/script/push-mlperf-inference-results-to-github) - * run,tar - * Enable this dependency only if all ENV vars are set:
    -`{'CM_TAR_SUBMISSION_DIR': ['yes']}` - - CM script: [tar-my-folder](https://github.com/mlcommons/cm4mlops/tree/master/script/tar-my-folder) - -___ -### Script output -`cmr "run mlc mlcommons mlperf training train mlperf-training submission checker submission-checker mlc-submission-checker [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/MLPerf-benchmark-support/truncate-mlperf-inference-accuracy-log/index.md b/docs/MLPerf-benchmark-support/truncate-mlperf-inference-accuracy-log/index.md deleted file mode 100644 index bd14b1c25..000000000 --- a/docs/MLPerf-benchmark-support/truncate-mlperf-inference-accuracy-log/index.md +++ /dev/null @@ -1,145 +0,0 @@ -Automatically generated README for this automation recipe: **truncate-mlperf-inference-accuracy-log** - -Category: **MLPerf benchmark support** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=truncate-mlperf-inference-accuracy-log,9d5ec20434084d14) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/truncate-mlperf-inference-accuracy-log)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *run,mlc,mlcommons,mlperf,inference,mlperf-inference,truncation,truncator,truncate,accuracy,accuracy-log,accuracy-log-trancation,accuracy-log-truncator,mlc-accuracy-log-trancation,mlc-accuracy-log-truncator* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "run mlc mlcommons mlperf inference mlperf-inference truncation truncator truncate accuracy accuracy-log accuracy-log-trancation accuracy-log-truncator mlc-accuracy-log-trancation mlc-accuracy-log-truncator" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=run,mlc,mlcommons,mlperf,inference,mlperf-inference,truncation,truncator,truncate,accuracy,accuracy-log,accuracy-log-trancation,accuracy-log-truncator,mlc-accuracy-log-trancation,mlc-accuracy-log-truncator` - -`cm run script --tags=run,mlc,mlcommons,mlperf,inference,mlperf-inference,truncation,truncator,truncate,accuracy,accuracy-log,accuracy-log-trancation,accuracy-log-truncator,mlc-accuracy-log-trancation,mlc-accuracy-log-truncator [--input_flags]` - -*or* - -`cmr "run mlc mlcommons mlperf inference mlperf-inference truncation truncator truncate accuracy accuracy-log accuracy-log-trancation accuracy-log-truncator mlc-accuracy-log-trancation mlc-accuracy-log-truncator"` - -`cmr "run mlc mlcommons mlperf inference mlperf-inference truncation truncator truncate accuracy accuracy-log accuracy-log-trancation accuracy-log-truncator mlc-accuracy-log-trancation mlc-accuracy-log-truncator " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'run,mlc,mlcommons,mlperf,inference,mlperf-inference,truncation,truncator,truncate,accuracy,accuracy-log,accuracy-log-trancation,accuracy-log-truncator,mlc-accuracy-log-trancation,mlc-accuracy-log-truncator' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="run,mlc,mlcommons,mlperf,inference,mlperf-inference,truncation,truncator,truncate,accuracy,accuracy-log,accuracy-log-trancation,accuracy-log-truncator,mlc-accuracy-log-trancation,mlc-accuracy-log-truncator"``` - -#### Run this script via Docker (beta) - -`cm docker script "run mlc mlcommons mlperf inference mlperf-inference truncation truncator truncate accuracy accuracy-log accuracy-log-trancation accuracy-log-truncator mlc-accuracy-log-trancation mlc-accuracy-log-truncator" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--input=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` -* `--submission_dir=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` -* `--submitter=value` → `CM_MLPERF_SUBMITTER=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "input":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/truncate-mlperf-inference-accuracy-log/_cm.json)*** - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,mlcommons,inference,src - * CM names: `--adr.['inference-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * get,mlperf,submission,dir - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_MLPERF_INFERENCE_SUBMISSION_DIR': ['on']}` - * CM names: `--adr.['get-mlperf-submission-dir']...` - - CM script: [get-mlperf-inference-submission-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-submission-dir) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/truncate-mlperf-inference-accuracy-log/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/truncate-mlperf-inference-accuracy-log/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/truncate-mlperf-inference-accuracy-log/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/truncate-mlperf-inference-accuracy-log/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/truncate-mlperf-inference-accuracy-log/_cm.json) - -___ -### Script output -`cmr "run mlc mlcommons mlperf inference mlperf-inference truncation truncator truncate accuracy accuracy-log accuracy-log-trancation accuracy-log-truncator mlc-accuracy-log-trancation mlc-accuracy-log-truncator " [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Modular-AI-ML-application-pipeline/app-image-classification-onnx-py/index.md b/docs/Modular-AI-ML-application-pipeline/app-image-classification-onnx-py/index.md deleted file mode 100644 index eb4e16f3d..000000000 --- a/docs/Modular-AI-ML-application-pipeline/app-image-classification-onnx-py/index.md +++ /dev/null @@ -1,213 +0,0 @@ -Automatically generated README for this automation recipe: **app-image-classification-onnx-py** - -Category: **Modular AI/ML application pipeline** - -License: **Apache 2.0** - -Maintainers: 
[Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-image-classification-onnx-py,3d5e908e472b417e) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *modular,python,app,image-classification,onnx* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "modular python app image-classification onnx" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=modular,python,app,image-classification,onnx` - -`cm run script --tags=modular,python,app,image-classification,onnx[,variations] [--input_flags]` - -*or* - -`cmr "modular python app image-classification onnx"` - -`cmr "modular python app image-classification onnx [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started 
Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - - -#### Input Flags - -* --**input**=Path to JPEG image to classify -* --**output**=Output directory (optional) -* --**j**=Print JSON output - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "input":...} -``` -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'modular,python,app,image-classification,onnx' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="modular,python,app,image-classification,onnx"``` - -#### Run this script via Docker (beta) - -`cm docker script "modular python app image-classification onnx[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * Group "**target**" -
    - Click here to expand this section. - - * **`_cpu`** (default) - - Environment variables: - - *USE_CPU*: `True` - - Workflow: - * `_cuda` - - Environment variables: - - *USE_CUDA*: `True` - - Workflow: - -
    - - -#### Default variations - -`_cpu` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--input=value` → `CM_IMAGE=value` -* `--output=value` → `CM_APP_IMAGE_CLASSIFICATION_ONNX_PY_OUTPUT=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "input":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_BATCH_COUNT: `1` -* CM_BATCH_SIZE: `1` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,cuda - * Enable this dependency only if all ENV vars are set:
    -`{'USE_CUDA': [True]}` - * CM names: `--adr.['cuda']...` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * get,cudnn - * Enable this dependency only if all ENV vars are set:
    -`{'USE_CUDA': [True]}` - * CM names: `--adr.['cudnn']...` - - CM script: [get-cudnn](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cudnn) - * get,dataset,imagenet,image-classification,original,_run-during-docker-build - - CM script: [get-dataset-imagenet-val](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-val) - * get,dataset-aux,imagenet-aux,image-classification - - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) - * get,ml-model,resnet50,_onnx,image-classification - * CM names: `--adr.['ml-model']...` - - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) - * get,generic-python-lib,_package.Pillow - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.numpy - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.opencv-python - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_onnxruntime - * Skip this dependenecy only if all ENV vars are set:
    -`{'USE_CUDA': [True]}` - * CM names: `--adr.['onnxruntime']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_onnxruntime_gpu - * Enable this dependency only if all ENV vars are set:
    -`{'USE_CUDA': [True]}` - * CM names: `--adr.['onnxruntime']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py/_cm.yaml) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-onnx-py/_cm.yaml) - -___ -### Script output -`cmr "modular python app image-classification onnx [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_APP_IMAGE_CLASSIFICATION_ONNX_PY*` -#### New environment keys auto-detected from customize diff --git a/docs/Modular-AI-ML-application-pipeline/app-image-classification-tf-onnx-cpp/index.md b/docs/Modular-AI-ML-application-pipeline/app-image-classification-tf-onnx-cpp/index.md deleted file mode 100644 index 4609e0f99..000000000 --- a/docs/Modular-AI-ML-application-pipeline/app-image-classification-tf-onnx-cpp/index.md +++ /dev/null @@ -1,133 +0,0 @@ -Automatically generated README for this automation recipe: **app-image-classification-tf-onnx-cpp** - -Category: **Modular AI/ML application pipeline** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-image-classification-tf-onnx-cpp,879ed32e47074033) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tf-onnx-cpp)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *app,image-classification,cpp,tensorflow,onnx* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "app image-classification cpp tensorflow onnx" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=app,image-classification,cpp,tensorflow,onnx` - -`cm run script --tags=app,image-classification,cpp,tensorflow,onnx ` - -*or* - -`cmr "app image-classification cpp tensorflow onnx"` - -`cmr "app image-classification cpp tensorflow onnx " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'app,image-classification,cpp,tensorflow,onnx' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="app,image-classification,cpp,tensorflow,onnx"``` - -#### Run this script via Docker (beta) - -`cm docker script "app image-classification cpp tensorflow onnx" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_BATCH_COUNT: `1` -* CM_BATCH_SIZE: `1` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tf-onnx-cpp/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,gcc - - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) - * get,dataset,image-classification,original - - CM script: [get-dataset-imagenet-val](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-val) - * get,dataset-aux,image-classification - - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) - * get,ml-model,raw,image-classification,resnet50,_onnx,_opset-11 - - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) - * tensorflow,from-src - - CM script: [install-tensorflow-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-tensorflow-from-src) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tf-onnx-cpp/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tf-onnx-cpp/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tf-onnx-cpp/_cm.json) - 1. Run "postrocess" function from customize.py - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tf-onnx-cpp/_cm.json) - -___ -### Script output -`cmr "app image-classification cpp tensorflow onnx " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Modular-AI-ML-application-pipeline/app-image-classification-torch-py/index.md b/docs/Modular-AI-ML-application-pipeline/app-image-classification-torch-py/index.md deleted file mode 100644 index 2f2413737..000000000 --- a/docs/Modular-AI-ML-application-pipeline/app-image-classification-torch-py/index.md +++ /dev/null @@ -1,170 +0,0 @@ -Automatically generated README for this automation recipe: **app-image-classification-torch-py** - -Category: **Modular AI/ML application pipeline** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-image-classification-torch-py,e3986ae887b84ca8) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-torch-py)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *app,image-classification,python,torch* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "app image-classification python torch" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=app,image-classification,python,torch` - -`cm run script --tags=app,image-classification,python,torch[,variations] ` - -*or* - -`cmr "app image-classification python torch"` - -`cmr "app image-classification python torch [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'app,image-classification,python,torch' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="app,image-classification,python,torch"``` - -#### Run this script via Docker (beta) - -`cm docker script "app image-classification python torch[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_cuda` - - Environment variables: - - *USE_CUDA*: `yes` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,cuda - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_BATCH_COUNT: `1` -* CM_BATCH_SIZE: `1` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-torch-py/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,dataset,imagenet,image-classification,preprocessed - - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) - * get,dataset-aux,imagenet-aux,image-classification - - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) - * get,imagenet-helper - - CM script: [get-dataset-imagenet-helper](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-helper) - * get,ml-model,image-classification,resnet50,_pytorch,_fp32 - - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) - * get,generic-python-lib,_torch - * Skip this dependenecy only if all ENV vars are set:
    -`{'USE_CUDA': ['yes']}` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torch_cuda - * Enable this dependency only if all ENV vars are set:
    -`{'USE_CUDA': ['yes']}` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torchvision - * Skip this dependenecy only if all ENV vars are set:
    -`{'USE_CUDA': ['yes']}` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torchvision_cuda - * Enable this dependency only if all ENV vars are set:
    -`{'USE_CUDA': ['yes']}` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-torch-py/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-torch-py/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-torch-py/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-torch-py/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-torch-py/_cm.json) - -___ -### Script output -`cmr "app image-classification python torch [,variations]" -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Modular-AI-ML-application-pipeline/app-image-classification-tvm-onnx-py/index.md b/docs/Modular-AI-ML-application-pipeline/app-image-classification-tvm-onnx-py/index.md deleted file mode 100644 index c94a3505f..000000000 --- a/docs/Modular-AI-ML-application-pipeline/app-image-classification-tvm-onnx-py/index.md +++ /dev/null @@ -1,158 +0,0 @@ -Automatically generated README for this automation recipe: **app-image-classification-tvm-onnx-py** - -Category: **Modular AI/ML application pipeline** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM 
script](https://access.cknowledge.org/playground/?action=scripts&name=app-image-classification-tvm-onnx-py,63080407db4d4ac4) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tvm-onnx-py)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *app,image-classification,python,tvm-onnx* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "app image-classification python tvm-onnx" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=app,image-classification,python,tvm-onnx` - -`cm run script --tags=app,image-classification,python,tvm-onnx[,variations] ` - -*or* - -`cmr "app image-classification python tvm-onnx"` - -`cmr "app image-classification python tvm-onnx [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'app,image-classification,python,tvm-onnx' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="app,image-classification,python,tvm-onnx"``` - -#### Run this script via Docker (beta) - -`cm docker script "app image-classification python tvm-onnx[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_cuda` - - Environment variables: - - *USE_CUDA*: `yes` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,cuda - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * `_llvm` - - Workflow: - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_BATCH_COUNT: `1` -* CM_BATCH_SIZE: `1` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tvm-onnx-py/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,dataset,image-classification,original - - CM script: [get-dataset-imagenet-val](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-val) - * get,dataset-aux,image-classification - - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) - * get,raw,ml-model,image-classification,resnet50,_onnx - - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) - * get,generic-python-lib,_onnxruntime - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,tvm - * CM names: `--adr.['tvm']...` - - CM script: [get-tvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tvm-onnx-py/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tvm-onnx-py/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tvm-onnx-py/_cm.json) - 1. Run "postrocess" function from customize.py - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-classification-tvm-onnx-py/_cm.json) - -___ -### Script output -`cmr "app image-classification python tvm-onnx [,variations]" -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Modular-AI-ML-application-pipeline/app-stable-diffusion-onnx-py/index.md b/docs/Modular-AI-ML-application-pipeline/app-stable-diffusion-onnx-py/index.md deleted file mode 100644 index 14858f184..000000000 --- a/docs/Modular-AI-ML-application-pipeline/app-stable-diffusion-onnx-py/index.md +++ /dev/null @@ -1,203 +0,0 @@ -Automatically generated README for this automation recipe: **app-stable-diffusion-onnx-py** - -Category: **Modular AI/ML application pipeline** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-stable-diffusion-onnx-py,4d33981ac3534b3b) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-stable-diffusion-onnx-py)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *modular,python,app,stable-diffusion,onnx* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "modular python app stable-diffusion onnx" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=modular,python,app,stable-diffusion,onnx` - -`cm run script --tags=modular,python,app,stable-diffusion,onnx[,variations] [--input_flags]` - -*or* - -`cmr "modular python app stable-diffusion onnx"` - -`cmr "modular python app stable-diffusion onnx [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - - -#### Input Flags - -* --**text**=Text to generate image -* --**output**=Output directory - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "text":...} -``` -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'modular,python,app,stable-diffusion,onnx' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="modular,python,app,stable-diffusion,onnx"``` - -#### Run this script via Docker (beta) - -`cm docker script "modular python app stable-diffusion onnx[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * Group "**target**" -
    - Click here to expand this section. - - * **`_cpu`** (default) - - Environment variables: - - *USE_CPU*: `True` - - *CM_DEVICE*: `cpu` - - Workflow: - * `_cuda` - - Environment variables: - - *USE_CUDA*: `True` - - *CM_DEVICE*: `cuda:0` - - Workflow: - -
    - - -#### Default variations - -`_cpu` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--output=value` → `CM_APP_STABLE_DIFFUSION_ONNX_PY_OUTPUT=value` -* `--text=value` → `CM_APP_STABLE_DIFFUSION_ONNX_PY_TEXT=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "output":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-stable-diffusion-onnx-py/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,cuda - * Enable this dependency only if all ENV vars are set:
    -`{'USE_CUDA': [True]}` - * CM names: `--adr.['cuda']...` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * get,cudnn - * Enable this dependency only if all ENV vars are set:
    -`{'USE_CUDA': [True]}` - * CM names: `--adr.['cudnn']...` - - CM script: [get-cudnn](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cudnn) - * get,generic-python-lib,_package.optimum[onnxruntime] - * Skip this dependenecy only if all ENV vars are set:
    -`{'USE_CUDA': [True]}` - * CM names: `--adr.['optimum']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.optimum[onnxruntime-gpu] - * Enable this dependency only if all ENV vars are set:
    -`{'USE_CUDA': [True]}` - * CM names: `--adr.['optimum']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.diffusers - * CM names: `--adr.['diffusers']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,ml-model,huggingface,zoo,_model-stub.runwayml/stable-diffusion-v1-5 - - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-stable-diffusion-onnx-py/_cm.yaml) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-stable-diffusion-onnx-py/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-stable-diffusion-onnx-py/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-stable-diffusion-onnx-py/_cm.yaml) - 1. Run "postrocess" function from customize.py - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-stable-diffusion-onnx-py/_cm.yaml) - -___ -### Script output -`cmr "modular python app stable-diffusion onnx [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Modular-AI-ML-application-pipeline/index.md b/docs/Modular-AI-ML-application-pipeline/index.md new file mode 100644 index 000000000..5812e317e --- /dev/null +++ b/docs/Modular-AI-ML-application-pipeline/index.md @@ -0,0 +1,7 @@ +The Modular AI/ML application pipeline category contains the following scripts: + +- [app-image-classification-onnx-py](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/app-image-classification-onnx-py/README.md) +- [app-image-classification-tf-onnx-cpp](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/app-image-classification-tf-onnx-cpp/README.md) +- [app-image-classification-torch-py](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/app-image-classification-torch-py/README.md) +- [app-image-classification-tvm-onnx-py](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/app-image-classification-tvm-onnx-py/README.md) +- [app-stable-diffusion-onnx-py](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/app-stable-diffusion-onnx-py/README.md) diff --git a/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-dummy/index.md b/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-dummy/index.md deleted file mode 100644 index 64b91c4e3..000000000 --- a/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-dummy/index.md +++ /dev/null @@ -1,360 +0,0 @@ -Automatically generated README for this automation recipe: **app-mlperf-inference-dummy** - -Category: **Modular MLPerf benchmarks** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and 
Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-inference-dummy,5b71627383a94576) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-dummy)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *reproduce,mlcommons,mlperf,inference,harness,dummy-harness,dummy* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "reproduce mlcommons mlperf inference harness dummy-harness dummy" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=reproduce,mlcommons,mlperf,inference,harness,dummy-harness,dummy` - -`cm run script --tags=reproduce,mlcommons,mlperf,inference,harness,dummy-harness,dummy[,variations] [--input_flags]` - -*or* - -`cmr "reproduce mlcommons mlperf inference harness dummy-harness dummy"` - -`cmr "reproduce mlcommons mlperf inference harness dummy-harness dummy [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for 
more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'reproduce,mlcommons,mlperf,inference,harness,dummy-harness,dummy' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="reproduce,mlcommons,mlperf,inference,harness,dummy-harness,dummy"``` - -#### Run this script via Docker (beta) - -`cm docker script "reproduce mlcommons mlperf inference harness dummy-harness dummy[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *Internal group (variations should not be selected manually)* -
    - Click here to expand this section. - - * `_bert_` - - Workflow: - * `_gptj_` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,ml-model,gptj - * CM names: `--adr.['gptj-model']...` - - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) - * get,dataset,cnndm,_validation - - CM script: [get-dataset-cnndm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-cnndm) - * `_llama2-70b_` - - Workflow: - -
    - - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_pytorch,cpu` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_torch - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_pytorch,cuda` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_torch_cuda - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_singlestream,resnet50` - - Workflow: - * `_singlestream,retinanet` - - Workflow: - -
    - - - * Group "**backend**" -
    - Click here to expand this section. - - * **`_pytorch`** (default) - - Environment variables: - - *CM_MLPERF_BACKEND*: `pytorch` - - Workflow: - -
    - - - * Group "**batch-size**" -
    - Click here to expand this section. - - * `_bs.#` - - Workflow: - -
    - - - * Group "**device**" -
    - Click here to expand this section. - - * **`_cpu`** (default) - - Environment variables: - - *CM_MLPERF_DEVICE*: `cpu` - - Workflow: - * `_cuda` - - Environment variables: - - *CM_MLPERF_DEVICE*: `gpu` - - *CM_MLPERF_DEVICE_LIB_NAMESPEC*: `cudart` - - Workflow: - -
    - - - * Group "**loadgen-scenario**" -
    - Click here to expand this section. - - * `_multistream` - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `MultiStream` - - Workflow: - * `_offline` - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `Offline` - - Workflow: - * `_server` - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `Server` - - Workflow: - * `_singlestream` - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `SingleStream` - - Workflow: - -
    - - - * Group "**model**" -
    - Click here to expand this section. - - * `_bert-99` - - Environment variables: - - *CM_MODEL*: `bert-99` - - *CM_SQUAD_ACCURACY_DTYPE*: `float32` - - Workflow: - * `_bert-99.9` - - Environment variables: - - *CM_MODEL*: `bert-99.9` - - Workflow: - * `_gptj-99` - - Environment variables: - - *CM_MODEL*: `gptj-99` - - *CM_SQUAD_ACCURACY_DTYPE*: `float32` - - Workflow: - * `_gptj-99.9` - - Environment variables: - - *CM_MODEL*: `gptj-99.9` - - Workflow: - * `_llama2-70b-99` - - Environment variables: - - *CM_MODEL*: `llama2-70b-99` - - Workflow: - * `_llama2-70b-99.9` - - Environment variables: - - *CM_MODEL*: `llama2-70b-99.9` - - Workflow: - * **`_resnet50`** (default) - - Environment variables: - - *CM_MODEL*: `resnet50` - - Workflow: - * `_retinanet` - - Environment variables: - - *CM_MODEL*: `retinanet` - - Workflow: - -
    - - - * Group "**precision**" -
    - Click here to expand this section. - - * `_fp16` - - Workflow: - * `_fp32` - - Workflow: - * `_uint8` - - Workflow: - -
    - - -#### Default variations - -`_cpu,_pytorch,_resnet50` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--count=value` → `CM_MLPERF_LOADGEN_QUERY_COUNT=value` -* `--max_batchsize=value` → `CM_MLPERF_LOADGEN_MAX_BATCHSIZE=value` -* `--mlperf_conf=value` → `CM_MLPERF_CONF=value` -* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` -* `--multistream_target_latency=value` → `CM_MLPERF_LOADGEN_MULTISTREAM_TARGET_LATENCY=value` -* `--offline_target_qps=value` → `CM_MLPERF_LOADGEN_OFFLINE_TARGET_QPS=value` -* `--output_dir=value` → `CM_MLPERF_OUTPUT_DIR=value` -* `--performance_sample_count=value` → `CM_MLPERF_LOADGEN_PERFORMANCE_SAMPLE_COUNT=value` -* `--rerun=value` → `CM_RERUN=value` -* `--results_repo=value` → `CM_MLPERF_INFERENCE_RESULTS_REPO=value` -* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` -* `--server_target_qps=value` → `CM_MLPERF_LOADGEN_SERVER_TARGET_QPS=value` -* `--singlestream_target_latency=value` → `CM_MLPERF_LOADGEN_SINGLESTREAM_TARGET_LATENCY=value` -* `--skip_preprocess=value` → `CM_SKIP_PREPROCESS_DATASET=value` -* `--skip_preprocessing=value` → `CM_SKIP_PREPROCESS_DATASET=value` -* `--target_latency=value` → `CM_MLPERF_LOADGEN_TARGET_LATENCY=value` -* `--target_qps=value` → `CM_MLPERF_LOADGEN_TARGET_QPS=value` -* `--user_conf=value` → `CM_MLPERF_USER_CONF=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "count":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_MLPERF_LOADGEN_SCENARIO: `Offline` -* CM_MLPERF_LOADGEN_MODE: `performance` -* CM_SKIP_PREPROCESS_DATASET: `no` -* CM_SKIP_MODEL_DOWNLOAD: `no` -* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `dummy_harness` -* CM_MLPERF_SKIP_RUN: `no` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-dummy/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,mlcommons,inference,src - * CM names: `--adr.['inference-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * get,mlcommons,inference,loadgen - * CM names: `--adr.['inference-loadgen']...` - - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) - * generate,user-conf,mlperf,inference - * CM names: `--adr.['user-conf-generator']...` - - CM script: [generate-mlperf-inference-user-conf](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-user-conf) - * get,generic-python-lib,_mlperf_logging - * CM names: `--adr.['mlperf-logging']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,git,repo - * CM names: `--adr.inference-results inference-code...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-dummy/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-dummy/_cm.yaml) - 1. 
***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-dummy/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-dummy/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-dummy/customize.py)*** - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-dummy/_cm.yaml)*** - * benchmark-mlperf - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_MLPERF_SKIP_RUN': ['yes', True]}` - * CM names: `--adr.['runner', 'mlperf-runner']...` - - CM script: [benchmark-program-mlperf](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program-mlperf) - * save,mlperf,inference,state - * CM names: `--adr.['save-mlperf-inference-state']...` - - CM script: [save-mlperf-inference-implementation-state](https://github.com/mlcommons/cm4mlops/tree/master/script/save-mlperf-inference-implementation-state) - -___ -### Script output -`cmr "reproduce mlcommons mlperf inference harness dummy-harness dummy [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_DATASET_*` -* `CM_HW_NAME` -* `CM_IMAGENET_ACCURACY_DTYPE` -* `CM_MAX_EXAMPLES` -* `CM_MLPERF_*` -* `CM_ML_MODEL_*` -* `CM_SQUAD_ACCURACY_DTYPE` -#### New environment keys auto-detected from customize diff --git a/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-intel/index.md b/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-intel/index.md deleted file mode 100644 index 3278fbf44..000000000 --- a/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-intel/index.md +++ /dev/null @@ -1,621 +0,0 @@ -Automatically generated README for this automation recipe: **app-mlperf-inference-intel** - -Category: **Modular MLPerf benchmarks** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-inference-intel,c05a90433bb04cc1) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above 
meta description): *reproduce,mlcommons,mlperf,inference,harness,intel-harness,intel,intel-harness,intel* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "reproduce mlcommons mlperf inference harness intel-harness intel intel-harness intel" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=reproduce,mlcommons,mlperf,inference,harness,intel-harness,intel,intel-harness,intel` - -`cm run script --tags=reproduce,mlcommons,mlperf,inference,harness,intel-harness,intel,intel-harness,intel[,variations] [--input_flags]` - -*or* - -`cmr "reproduce mlcommons mlperf inference harness intel-harness intel intel-harness intel"` - -`cmr "reproduce mlcommons mlperf inference harness intel-harness intel intel-harness intel [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'reproduce,mlcommons,mlperf,inference,harness,intel-harness,intel,intel-harness,intel' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="reproduce,mlcommons,mlperf,inference,harness,intel-harness,intel,intel-harness,intel"``` - -#### Run this script via Docker (beta) - -`cm docker script "reproduce mlcommons mlperf inference harness intel-harness intel intel-harness intel[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *Internal group (variations should not be selected manually)* -
    - Click here to expand this section. - - * `_bert_` - - Environment variables: - - *CM_BENCHMARK*: `STANDALONE_BERT` - - *dataset_squad_tokenized_max_seq_length*: `384` - - *loadgen_buffer_size*: `10833` - - *loadgen_dataset_size*: `10833` - - Workflow: - * `_build-harness,bert_` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-sys-util,_rsync - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,dataset,original,squad - * CM names: `--adr.['squad-original']...` - - CM script: [get-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad) - * get,ml-model,bert-large,_pytorch,_int8 - * CM names: `--adr.['bert-large', 'ml-model']...` - - CM script: [get-ml-model-bert-large-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-large-squad) - * get,generic-python-lib,_package.tokenization - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_calibration,gptj_` - - Workflow: - * `_gptj_` - - Environment variables: - - *CM_BENCHMARK*: `STANDALONE_GPTJ` - - Workflow: - * `_int4,gptj_` - - Environment variables: - - *INTEL_GPTJ_INT4*: `yes` - - Workflow: - * `_int8,gptj_` - - Environment variables: - - *INTEL_GPTJ_INT4*: `no` - - Workflow: - -
    - - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_bert_,network-client` - - Environment variables: - - *CM_BENCHMARK*: `NETWORK_BERT_CLIENT` - - Workflow: - * `_bert_,network-server` - - Environment variables: - - *CM_BENCHMARK*: `NETWORK_BERT_SERVER` - - Workflow: - * `_bert_,pytorch` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,conda,_name.bert-pt - - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) - * install,llvm,src,_tag.llvmorg-15.0.7,_runtimes.libcxx:libcxxabi:openmp,_clang,_release,_for-intel-mlperf-inference-v3.1-bert - - CM script: [install-llvm-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-src) - * get,generic-sys-util,_libffi7 - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,generic,conda-package,_package.python - * CM names: `--adr.['conda-package', 'python']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic,conda-package,_package.ncurses,_source.conda-forge - * CM names: `--adr.['conda-package', 'ncurses']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic-sys-util,_numactl - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,generic,conda-package,_package.jemalloc,_source.conda-forge - * CM names: `--adr.['conda-package', 'jemalloc']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,pytorch,from.src,_for-intel-mlperf-inference-v3.1-bert - - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src) - * 
install,onednn,from.src,_for-intel-mlperf-inference-v3.1-bert - - CM script: [install-onednn-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-onednn-from-src) - * install,transformers,from.src,_for-intel-mlperf-inference-v3.1-bert - - CM script: [install-transformers-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-transformers-from-src) - * `_bs.#` - - Environment variables: - - *ML_MLPERF_MODEL_BATCH_SIZE*: `#` - - Workflow: - * `_gptj_,build-harness` - - Workflow: - * `_gptj_,pytorch` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,conda,_name.gptj-pt - - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) - * get,python,_conda.gptj-pt - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * install,llvm,src,_tag.llvmorg-16.0.6,_clang,_release,_for-intel-mlperf-inference-v3.1-gptj - - CM script: [install-llvm-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-llvm-src) - * get,generic,conda-package,_package.ncurses,_source.conda-forge - * CM names: `--adr.['conda-package', 'ncurses']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * get,generic-sys-util,_numactl - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,generic,conda-package,_package.jemalloc,_source.conda-forge - * CM names: `--adr.['conda-package', 'jemalloc']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * install,ipex,from.src,_for-intel-mlperf-inference-v3.1-gptj - - CM script: [install-ipex-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-ipex-from-src) - * get,generic,conda-package,_package.ninja - * Enable this dependency only if all ENV vars 
are set:
    -`{'INTEL_GPTJ_INT4': ['yes']}` - * CM names: `--adr.['conda-package', 'ninja']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * install,tpp-pex,from.src,_for-intel-mlperf-inference-v3.1-gptj - * Enable this dependency only if all ENV vars are set:
    -`{'INTEL_GPTJ_INT4': ['yes']}` - - CM script: [install-tpp-pytorch-extension](https://github.com/mlcommons/cm4mlops/tree/master/script/install-tpp-pytorch-extension) - * get,generic-python-lib,_package.transformers - * CM names: `--adr.['pip-package', 'transformers']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,mlcommons,inference,src - * CM names: `--adr.['inference-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * get,mlcommons,inference,loadgen,_custom-python - * CM names: `--adr.['inference-loadgen']...` - - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) - * get,ml-model,large-language-model,gptj - * CM names: `--adr.['ml-model', 'gptj-model', 'gpt-j-model']...` - - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) - * get,generic-python-lib,_package.datasets - * CM names: `--adr.['pip-package', 'datasets']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.accelerate - * CM names: `--adr.['pip-package', 'accelerate']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_custom-python,_package.torch,_url.git+https://github.com/pytorch/pytorch.git@927dc662386af052018212c7d01309a506fc94cd - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_int4,gptj_,build-harness` - - Workflow: - 1. 
***Read "deps" on other CM scripts*** - * reproduce,mlperf,inference,intel,harness,_calibration - * CM names: `--adr.['calibration']...` - - CM script: [app-mlperf-inference-intel](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-intel) - * get,generic-python-lib,_package.optimum - * CM names: `--adr.['pip-package', 'optimum']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_resnet50,uint8` - - Environment variables: - - *CM_IMAGENET_ACCURACY_DTYPE*: `int8` - - Workflow: - * `_sapphire-rapids.112c,gptj-99,offline,int4` - - Environment variables: - - *NUM_PROC*: `4` - - *KMP_BLOCKTIME*: `1` - - *WORKERS_PER_PROC*: `3` - - Workflow: - * `_sapphire-rapids.112c,gptj-99,offline,int8` - - Environment variables: - - *KMP_BLOCKTIME*: `1` - - *WORKERS_PER_PROC*: `2` - - Workflow: - * `_sapphire-rapids.112c,gptj-99,server,int4` - - Environment variables: - - *KMP_BLOCKTIME*: `1` - - *WORKERS_PER_PROC*: `4` - - Workflow: - * `_sapphire-rapids.112c,gptj-99,server,int8` - - Environment variables: - - *KMP_BLOCKTIME*: `1` - - *WORKERS_PER_PROC*: `2` - - Workflow: - * `_sapphire-rapids.24c,bert-99` - - Environment variables: - - *WORKERS_PER_PROC*: `1` - - Workflow: - * `_sapphire-rapids.24c,gptj-99,offline,int4` - - Environment variables: - - *KMP_BLOCKTIME*: `10` - - *WORKERS_PER_PROC*: `1` - - Workflow: - * `_sapphire-rapids.24c,gptj-99,offline,int8` - - Environment variables: - - *KMP_BLOCKTIME*: `10` - - *WORKERS_PER_PROC*: `1` - - Workflow: - -
    - - - * Group "**device**" -
    - Click here to expand this section. - - * **`_cpu`** (default) - - Environment variables: - - *CM_MLPERF_DEVICE*: `cpu` - - Workflow: - -
    - - - * Group "**framework**" -
    - Click here to expand this section. - - * **`_pytorch`** (default) - - Environment variables: - - *CM_MLPERF_BACKEND*: `pytorch` - - *CM_MLPERF_BACKEND_LIB_NAMESPEC*: `pytorch` - - Workflow: - -
    - - - * Group "**loadgen-batchsize**" -
    - Click here to expand this section. - - * `_batch_size.#` - - Environment variables: - - *CM_MLPERF_LOADGEN_BATCH_SIZE*: `#` - - Workflow: - -
    - - - * Group "**loadgen-scenario**" -
    - Click here to expand this section. - - * `_multistream` - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `MultiStream` - - Workflow: - * `_offline` - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `Offline` - - Workflow: - * `_server` - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `Server` - - Workflow: - * `_singlestream` - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `SingleStream` - - Workflow: - -
    - - - * Group "**model**" -
    - Click here to expand this section. - - * `_bert-99` - - Environment variables: - - *CM_MODEL*: `bert-99` - - *CM_SQUAD_ACCURACY_DTYPE*: `float32` - - *CM_NOT_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3750364/files/bert_large_v1_1_fake_quant.onnx` - - Workflow: - * `_bert-99.9` - - Environment variables: - - *CM_MODEL*: `bert-99.9` - - *CM_NOT_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3733910/files/model.onnx` - - Workflow: - * `_gptj-99` - - Environment variables: - - *CM_MODEL*: `gptj-99` - - *CM_NOT_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3733910/files/model.onnx` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8` - - Workflow: - * `_gptj-99.9` - - Environment variables: - - *CM_MODEL*: `gptj-99.9` - - *CM_NOT_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3733910/files/model.onnx` - - Workflow: - * **`_resnet50`** (default) - - Environment variables: - - *CM_MODEL*: `resnet50` - - *dataset_imagenet_preprocessed_input_square_side*: `224` - - *ml_model_has_background_class*: `YES` - - *ml_model_image_height*: `224` - - *loadgen_buffer_size*: `1024` - - *loadgen_dataset_size*: `50000` - - *CM_BENCHMARK*: `STANDALONE_CLASSIFICATION` - - Workflow: - * `_retinanet` - - Environment variables: - - *CM_MODEL*: `retinanet` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/6617981/files/resnext50_32x4d_fpn.pth` - - *dataset_imagenet_preprocessed_input_square_side*: `224` - - *ml_model_image_height*: `800` - - *ml_model_image_width*: `800` - - *loadgen_buffer_size*: `64` - - *loadgen_dataset_size*: `24576` - - *CM_BENCHMARK*: `STANDALONE_OBJECT_DETECTION` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_numpy - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - -
    - - - * Group "**network-mode**" -
    - Click here to expand this section. - - * `_network-server` - - Environment variables: - - *CM_MLPERF_NETWORK_RUN_MODE*: `network-server` - - Workflow: - * **`_standalone`** (default) - - Environment variables: - - *CM_MLPERF_NETWORK_RUN_MODE*: `standalone` - - Workflow: - -
    - - - * Group "**network-run-mode**" -
    - Click here to expand this section. - - * `_network-client` - - Environment variables: - - *CM_MLPERF_NETWORK_RUN_MODE*: `network-client` - - Workflow: - -
    - - - * Group "**power-mode**" -
    - Click here to expand this section. - - * `_maxn` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_MAXN*: `True` - - Workflow: - * `_maxq` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_MAXQ*: `True` - - Workflow: - -
    - - - * Group "**precision**" -
    - Click here to expand this section. - - * `_fp32` - - Environment variables: - - *CM_IMAGENET_ACCURACY_DTYPE*: `float32` - - Workflow: - * `_int4` - - Workflow: - * `_uint8` - - Workflow: - -
    - - - * Group "**run-mode**" -
    - Click here to expand this section. - - * `_build-harness` - - Environment variables: - - *CM_LOCAL_MLPERF_INFERENCE_INTEL_RUN_MODE*: `build_harness` - - Workflow: - * `_calibration` - - Environment variables: - - *CM_LOCAL_MLPERF_INFERENCE_INTEL_RUN_MODE*: `calibration` - - Workflow: - * **`_run-harness`** (default) - - Environment variables: - - *CM_LOCAL_MLPERF_INFERENCE_INTEL_RUN_MODE*: `run_harness` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * reproduce,mlperf,inference,intel,harness,_build-harness - * CM names: `--adr.['build-harness']...` - - CM script: [app-mlperf-inference-intel](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-intel) - * get,mlcommons,inference,src - * CM names: `--adr.['inference-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * generate,user-conf,mlperf,inference - * CM names: `--adr.['user-conf-generator']...` - - CM script: [generate-mlperf-inference-user-conf](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-user-conf) - -
    - - - * Group "**sut**" -
    - Click here to expand this section. - - * `_sapphire-rapids.112c` - - Environment variables: - - *WARMUP*: ` --warmup` - - Workflow: - * `_sapphire-rapids.24c` - - Workflow: - -
    - - -#### Default variations - -`_cpu,_pytorch,_resnet50,_run-harness,_standalone` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--count=value` → `CM_MLPERF_LOADGEN_QUERY_COUNT=value` -* `--max_batchsize=value` → `CM_MLPERF_LOADGEN_MAX_BATCHSIZE=value` -* `--mlperf_conf=value` → `CM_MLPERF_CONF=value` -* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` -* `--multistream_target_latency=value` → `CM_MLPERF_LOADGEN_MULTISTREAM_TARGET_LATENCY=value` -* `--offline_target_qps=value` → `CM_MLPERF_LOADGEN_OFFLINE_TARGET_QPS=value` -* `--output_dir=value` → `CM_MLPERF_OUTPUT_DIR=value` -* `--performance_sample_count=value` → `CM_MLPERF_LOADGEN_PERFORMANCE_SAMPLE_COUNT=value` -* `--rerun=value` → `CM_RERUN=value` -* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` -* `--server_target_qps=value` → `CM_MLPERF_LOADGEN_SERVER_TARGET_QPS=value` -* `--singlestream_target_latency=value` → `CM_MLPERF_LOADGEN_SINGLESTREAM_TARGET_LATENCY=value` -* `--skip_preprocess=value` → `CM_SKIP_PREPROCESS_DATASET=value` -* `--skip_preprocessing=value` → `CM_SKIP_PREPROCESS_DATASET=value` -* `--target_latency=value` → `CM_MLPERF_LOADGEN_TARGET_LATENCY=value` -* `--target_qps=value` → `CM_MLPERF_LOADGEN_TARGET_QPS=value` -* `--user_conf=value` → `CM_MLPERF_USER_CONF=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "count":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_BATCH_COUNT: `1` -* CM_BATCH_SIZE: `1` -* CM_FAST_COMPILATION: `yes` -* CM_MLPERF_LOADGEN_SCENARIO: `Offline` -* CM_MLPERF_LOADGEN_MODE: `performance` -* CM_SKIP_PREPROCESS_DATASET: `no` -* CM_SKIP_MODEL_DOWNLOAD: `no` -* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `intel` -* CM_MLPERF_SKIP_RUN: `no` -* verbosity: `1` -* loadgen_trigger_cold_run: `0` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,generic-python-lib,_mlperf_logging - * CM names: `--adr.['mlperf-logging']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,ml-model,resnet50,_fp32,_onnx,_from-tf - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['resnet50']}` - * CM names: `--adr.['resnet50-model', 'ml-model']...` - - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) - * compile,intel,model,_resnet50 - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['resnet50']}` - * CM names: `--adr.['resnet50-compiler']...` - - *Warning: no scripts found* - * get,dataset,imagenet,preprocessed,_for.resnet50,_NHWC,_full - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['resnet50']}` - * CM names: `--adr.['imagenet-preprocessed', 'dataset-preprocessed']...` - - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) - * compile,intel,model,_retinanet - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['retinanet']}` - * CM names: `--adr.['retinanet-compiler']...` - - *Warning: no scripts found* - * get,dataset,preprocessed,openimages,_for.retinanet.onnx,_NCHW,_validation,_custom-annotations - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['retinanet']}` - * CM names: `--adr.['openimages-preprocessed', 'dataset-preprocessed']...` - - CM script: [get-preprocessed-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openimages) - * get,mlperf,inference,results,_ctuning - * CM names: `--adr.inference-results...` - - CM script: [get-mlperf-inference-results](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel/_cm.yaml) - 1. ***Run native script if exists*** - * [run_bert_harness.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel/run_bert_harness.sh) - * [run_gptj_harness.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel/run_gptj_harness.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel/customize.py)*** - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-intel/_cm.yaml)*** - * benchmark-mlperf - * Enable this dependency only if all ENV vars are set:
    -`{'CM_LOCAL_MLPERF_INFERENCE_INTEL_RUN_MODE': ['run_harness']}` - * Skip this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_SKIP_RUN': ['yes', True]}` - * CM names: `--adr.['runner', 'mlperf-runner']...` - - CM script: [benchmark-program-mlperf](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program-mlperf) - * save,mlperf,inference,state - * CM names: `--adr.['save-mlperf-inference-state']...` - - CM script: [save-mlperf-inference-implementation-state](https://github.com/mlcommons/cm4mlops/tree/master/script/save-mlperf-inference-implementation-state) - -___ -### Script output -`cmr "reproduce mlcommons mlperf inference harness intel-harness intel intel-harness intel [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-qualcomm/index.md b/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-qualcomm/index.md deleted file mode 100644 index 6205c2108..000000000 --- a/docs/Modular-MLPerf-benchmarks/app-mlperf-inference-qualcomm/index.md +++ /dev/null @@ -1,775 +0,0 @@ -Automatically generated README for this automation recipe: **app-mlperf-inference-qualcomm** - -Category: **Modular MLPerf benchmarks** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-inference-qualcomm,eef1aca5d7c0470e) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-qualcomm)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *reproduce,mlcommons,mlperf,inference,harness,qualcomm-harness,qualcomm,kilt-harness,kilt* -* 
Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "reproduce mlcommons mlperf inference harness qualcomm-harness qualcomm kilt-harness kilt" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=reproduce,mlcommons,mlperf,inference,harness,qualcomm-harness,qualcomm,kilt-harness,kilt` - -`cm run script --tags=reproduce,mlcommons,mlperf,inference,harness,qualcomm-harness,qualcomm,kilt-harness,kilt[,variations] [--input_flags]` - -*or* - -`cmr "reproduce mlcommons mlperf inference harness qualcomm-harness qualcomm kilt-harness kilt"` - -`cmr "reproduce mlcommons mlperf inference harness qualcomm-harness qualcomm kilt-harness kilt [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'reproduce,mlcommons,mlperf,inference,harness,qualcomm-harness,qualcomm,kilt-harness,kilt', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="reproduce,mlcommons,mlperf,inference,harness,qualcomm-harness,qualcomm,kilt-harness,kilt"``` - -#### Run this script via Docker (beta) - -`cm docker script "reproduce mlcommons mlperf inference harness qualcomm-harness qualcomm kilt-harness kilt[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *Internal group (variations should not be selected manually)* -
    - Click here to expand this section. - - * `_bert_` - - Environment variables: - - *CM_BENCHMARK*: `STANDALONE_BERT` - - *kilt_model_name*: `bert` - - *kilt_model_seq_length*: `384` - - *kilt_model_bert_variant*: `BERT_PACKED` - - *kilt_input_format*: `INT64,1,384:INT64,1,8:INT64,1,384:INT64,1,384` - - *kilt_output_format*: `FLOAT32,1,384:FLOAT32,1,384` - - *dataset_squad_tokenized_max_seq_length*: `384` - - *loadgen_buffer_size*: `10833` - - *loadgen_dataset_size*: `10833` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_transformers - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_safetensors - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_onnx - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - -
    - - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_activation-count.#` - - Environment variables: - - *CM_MLPERF_QAIC_ACTIVATION_COUNT*: `#` - - Workflow: - * `_bert-99,offline` - - Workflow: - * `_bert-99,qaic` - - Environment variables: - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `https://github.com/mlcommons/inference_results_v3.1/blob/main/closed/Qualcomm/calibration.md` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int32` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8,fp16` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * compile,qaic,model,_bert-99,_pc.99.9980 - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_MLPERF_SKIP_RUN': [True]}` - * CM names: `--adr.['qaic-model-compiler', 'bert-99-compiler']...` - - CM script: [compile-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-model-for.qaic) - * `_bert-99.9,offline` - - Workflow: - * `_bert-99.9,qaic` - - Environment variables: - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `https://github.com/mlcommons/inference_results_v3.1/blob/main/closed/Qualcomm/calibration.md` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int32` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp16` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * compile,qaic,model,_bert-99.9 - * Skip this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_SKIP_RUN': [True]}` - * CM names: `--adr.['qaic-model-compiler', 'bert-99.9-compiler']...` - - CM script: [compile-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-model-for.qaic) - * `_bert_,network-client` - - Environment variables: - - *CM_BENCHMARK*: `NETWORK_BERT_CLIENT` - - Workflow: - * `_bert_,network-server` - - Environment variables: - - *CM_BENCHMARK*: `NETWORK_BERT_SERVER` - - Workflow: - * `_bert_,qaic` - - Environment variables: - - *kilt_model_batch_size*: `1` - - *kilt_input_format*: `UINT32,1,384:UINT32,1,8:UINT32,1,384:UINT32,1,384` - - *kilt_input_formata*: `UINT32,1,384:UINT32,1,384:UINT32,1,384` - - *kilt_output_formatia*: `UINT8,1,384:UINT8,1,384` - - *kilt_device_qaic_skip_stage*: `convert` - - Workflow: - * `_bert_,singlestream` - - Environment variables: - - *kilt_model_batch_size*: `1` - - Workflow: - * `_dl2q.24xlarge,bert-99,offline` - - Environment variables: - - *qaic_activation_count*: `14` - - Workflow: - * `_dl2q.24xlarge,bert-99.9,offline` - - Environment variables: - - *qaic_activation_count*: `7` - - Workflow: - * `_dl2q.24xlarge,bert-99.9,server` - - Environment variables: - - *qaic_activation_count*: `7` - - Workflow: - * `_dl2q.24xlarge,resnet50,multistream` - - Environment variables: - - *qaic_activation_count*: `1` - - Workflow: - * `_dl2q.24xlarge,resnet50,offline` - - Environment variables: - - *qaic_activation_count*: `3` - - Workflow: - * `_dl2q.24xlarge,resnet50,server` - - Environment variables: - - *qaic_activation_count*: `3` - - Workflow: - * `_dl2q.24xlarge,retinanet,offline` - - Environment variables: - - *qaic_activation_count*: `14` - - Workflow: - * `_dl2q.24xlarge,retinanet,server` - - Environment variables: - - *qaic_activation_count*: `14` - - Workflow: - * `_dl2q.24xlarge,singlestream` - - Environment variables: - - *CM_QAIC_DEVICES*: `0` - - *qaic_activation_count*: `1` - - Workflow: - * `_num-devices.4` - - Environment variables: - - *CM_QAIC_DEVICES*: 
`0,1,2,3` - - Workflow: - * `_pro` - - Environment variables: - - *qaic_queue_length*: `10` - - Workflow: - * `_pro,num-devices.4,bert-99,offline` - - Environment variables: - - *qaic_activation_count*: `16` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * set,device,qaic,_vc.15 - - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) - * `_pro,num-devices.4,bert-99,server` - - Environment variables: - - *qaic_activation_count*: `16` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * set,device,qaic,_vc.13 - - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) - * `_pro,num-devices.4,bert-99.9,offline` - - Environment variables: - - *qaic_activation_count*: `8` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * set,device,qaic,_vc.13 - - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) - * `_pro,num-devices.4,bert-99.9,server` - - Environment variables: - - *qaic_activation_count*: `8` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * set,device,qaic,_vc.13 - - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) - * `_pro,num-devices.4,resnet50,offline` - - Environment variables: - - *qaic_activation_count*: `4` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * set,device,qaic,_vc.16 - - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) - * `_pro,num-devices.4,resnet50,server` - - Environment variables: - - *qaic_activation_count*: `4` - - Workflow: - * `_pro,num-devices.4,retinanet,offline` - - Environment variables: - - *qaic_activation_count*: `16` - - Workflow: - 1. 
***Read "deps" on other CM scripts*** - * set,device,qaic,_vc.17 - - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) - * `_pro,num-devices.4,retinanet,server` - - Environment variables: - - *qaic_activation_count*: `16` - - Workflow: - * `_pro,num-devices.4,singlestream` - - Environment variables: - - *CM_QAIC_DEVICES*: `0` - - *qaic_activation_count*: `1` - - Workflow: - * `_rb6,bert-99,offline` - - Environment variables: - - *qaic_activation_count*: `9` - - Workflow: - * `_rb6,resnet50,multistream` - - Environment variables: - - *qaic_activation_count*: `2` - - Workflow: - * `_rb6,resnet50,offline` - - Environment variables: - - *qaic_activation_count*: `2` - - Workflow: - * `_rb6,retinanet,multistream` - - Environment variables: - - *qaic_activation_count*: `8` - - Workflow: - * `_rb6,retinanet,offline` - - Environment variables: - - *qaic_activation_count*: `9` - - Workflow: - * `_rb6,singlestream` - - Environment variables: - - *qaic_activation_count*: `1` - - Workflow: - * `_resnet50,uint8` - - Environment variables: - - *kilt_input_format*: `UINT8,-1,224,224,3` - - *kilt_device_qaic_skip_stage*: `convert` - - *CM_IMAGENET_ACCURACY_DTYPE*: `int8` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `https://github.com/mlcommons/inference_results_v3.1/blob/main/closed/Qualcomm/calibration.md` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8` - - Workflow: - * `_retinanet,qaic,uint8` - - Environment variables: - - *kilt_device_qaic_skip_stage*: `convert` - - *kilt_input_format*: `UINT8,1,3,800,800` - - *kilt_output_format*: `INT8,1,1000:INT8,1,1000:INT8,1,1000:INT8,1,1000:INT8,1,1000:INT8,1,1000:INT8,1,1000:INT8,1,1000:INT8,1,1000:INT8,1,1000:INT8,1,4,1000:INT8,14,1000:INT8,1,4,1000:INT8,1,4,1000:INT8,1,4,1000` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `https://github.com/mlcommons/inference_results_v3.1/blob/main/closed/Qualcomm/calibration.md` - - 
*CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8` - - Workflow: - * `_singlestream,resnet50` - - Workflow: - * `_singlestream,retinanet` - - Workflow: - -
    - - - * Group "**batch-size**" -
    - Click here to expand this section. - - * `_bs.#` - - Environment variables: - - *kilt_model_batch_size*: `#` - - Workflow: - * `_bs.0` - - Environment variables: - - *kilt_model_batch_size*: `1` - - Workflow: - -
    - - - * Group "**device**" -
    - Click here to expand this section. - - * **`_cpu`** (default) - - Environment variables: - - *CM_MLPERF_DEVICE*: `cpu` - - *kilt_backend_type*: `cpu` - - Workflow: - * `_cuda` - - Environment variables: - - *CM_MLPERF_DEVICE*: `gpu` - - *CM_MLPERF_DEVICE_LIB_NAMESPEC*: `cudart` - - *kilt_backend_type*: `gpu` - - Workflow: - * `_qaic` - - Environment variables: - - *CM_MLPERF_DEVICE*: `qaic` - - *CM_MLPERF_DEVICE_LIB_NAMESPEC*: `QAic` - - *kilt_backend_type*: `qaic` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,qaic,platform,sdk - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_MLPERF_SKIP_RUN': [True]}` - - CM script: [get-qaic-platform-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-qaic-platform-sdk) - * get,lib,protobuf,_tag.v3.11.4 - * Skip this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_SKIP_RUN': [True]}` - - CM script: [get-lib-protobuf](https://github.com/mlcommons/cm4mlops/tree/master/script/get-lib-protobuf) - * set,device,mode,qaic - * Enable this dependency only if all ENV vars are set:
    -`{'CM_QAIC_VC': 'on'}` - - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) - * set,device,mode,qaic,_ecc - * Enable this dependency only if all ENV vars are set:
    -`{'CM_QAIC_ECC': 'yes'}` - - CM script: [set-device-settings-qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/set-device-settings-qaic) - -
    - - - * Group "**framework**" -
    - Click here to expand this section. - - * `_glow` - - Environment variables: - - *device*: `qaic` - - *CM_MLPERF_BACKEND*: `glow` - - *CM_MLPERF_BACKEND_LIB_NAMESPEC*: `QAic` - - Workflow: - * **`_onnxruntime`** (default) - - Environment variables: - - *device*: `onnxrt` - - *CM_MLPERF_BACKEND*: `onnxruntime` - - *CM_MLPERF_BACKEND_LIB_NAMESPEC*: `onnxruntime` - - Workflow: - * `_tensorrt` - - Environment variables: - - *CM_MLPERF_BACKEND*: `tensorrt` - - *device*: `tensorrt` - - *CM_MLPERF_BACKEND_NAME*: `TensorRT` - - Workflow: - -
    - - - * Group "**loadgen-batch-size**" -
    - Click here to expand this section. - - * `_loadgen-batch-size.#` - - Environment variables: - - *CM_MLPERF_LOADGEN_BATCH_SIZE*: `#` - - Workflow: - -
    - - - * Group "**loadgen-scenario**" -
    - Click here to expand this section. - - * `_multistream` - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `MultiStream` - - Workflow: - * `_offline` - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `Offline` - - Workflow: - * `_server` - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `Server` - - Workflow: - * `_singlestream` - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `SingleStream` - - Workflow: - -
    - - - * Group "**model**" -
    - Click here to expand this section. - - * `_bert-99` - - Environment variables: - - *CM_MODEL*: `bert-99` - - *CM_SQUAD_ACCURACY_DTYPE*: `float32` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3750364/files/bert_large_v1_1_fake_quant.onnx` - - Workflow: - * `_bert-99.9` - - Environment variables: - - *CM_MODEL*: `bert-99.9` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3733910/files/model.onnx` - - Workflow: - * **`_resnet50`** (default) - - Environment variables: - - *CM_MODEL*: `resnet50` - - *kilt_model_name*: `resnet50` - - *kilt_input_count*: `1` - - *kilt_output_count*: `1` - - *kilt_input_format*: `FLOAT32,-1,224,224,3` - - *kilt_output_format*: `INT64,-1` - - *dataset_imagenet_preprocessed_input_square_side*: `224` - - *ml_model_has_background_class*: `YES` - - *ml_model_image_height*: `224` - - *loadgen_buffer_size*: `1024` - - *loadgen_dataset_size*: `50000` - - *CM_BENCHMARK*: `STANDALONE_CLASSIFICATION` - - Workflow: - * `_retinanet` - - Environment variables: - - *CM_MODEL*: `retinanet` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/6617981/files/resnext50_32x4d_fpn.pth` - - *kilt_model_name*: `retinanet` - - *kilt_input_count*: `1` - - *kilt_model_max_detections*: `600` - - *kilt_output_count*: `1` - - *kilt_input_format*: `FLOAT32,-1,3,800,800` - - *kilt_output_format*: `INT64,-1` - - *dataset_imagenet_preprocessed_input_square_side*: `224` - - *ml_model_image_height*: `800` - - *ml_model_image_width*: `800` - - *loadgen_buffer_size*: `64` - - *loadgen_dataset_size*: `24576` - - *CM_BENCHMARK*: `STANDALONE_OBJECT_DETECTION` - - Workflow: - 1. 
***Read "deps" on other CM scripts*** - * get,generic-python-lib,_Pillow - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torch - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torchvision - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_opencv-python - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_numpy - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_pycocotools - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - -
    - - - * Group "**nsp**" -
    - Click here to expand this section. - - * `_nsp.#` - - Workflow: - * `_nsp.14` - - Workflow: - * `_nsp.16` - - Workflow: - -
    - - - * Group "**power-mode**" -
    - Click here to expand this section. - - * `_maxn` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_MAXN*: `True` - - Workflow: - * `_maxq` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_MAXQ*: `True` - - Workflow: - -
    - - - * Group "**precision**" -
    - Click here to expand this section. - - * `_fp16` - - Workflow: - * `_fp32` - - Environment variables: - - *CM_IMAGENET_ACCURACY_DTYPE*: `float32` - - Workflow: - * `_uint8` - - Workflow: - -
    - - - * Group "**run-mode**" -
    - Click here to expand this section. - - * `_network-client` - - Environment variables: - - *CM_RUN_MODE*: `network-client` - - Workflow: - * `_network-server` - - Environment variables: - - *CM_RUN_MODE*: `network-server` - - Workflow: - * **`_standalone`** (default) - - Environment variables: - - *CM_RUN_MODE*: `standalone` - - Workflow: - -
    - - - * Group "**sut**" -
    - Click here to expand this section. - - * `_dl2q.24xlarge` - - Environment variables: - - *CM_QAIC_DEVICES*: `0,1,2,3,4,5,6,7` - - *qaic_queue_length*: `4` - - Workflow: - * `_rb6` - - Environment variables: - - *CM_QAIC_DEVICES*: `0` - - *qaic_queue_length*: `6` - - Workflow: - -
    - - -#### Default variations - -`_cpu,_onnxruntime,_resnet50,_standalone` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--count=value` → `CM_MLPERF_LOADGEN_QUERY_COUNT=value` -* `--devices=value` → `CM_QAIC_DEVICES=value` -* `--max_batchsize=value` → `CM_MLPERF_LOADGEN_MAX_BATCHSIZE=value` -* `--mlperf_conf=value` → `CM_MLPERF_CONF=value` -* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` -* `--multistream_target_latency=value` → `CM_MLPERF_LOADGEN_MULTISTREAM_TARGET_LATENCY=value` -* `--offline_target_qps=value` → `CM_MLPERF_LOADGEN_OFFLINE_TARGET_QPS=value` -* `--output_dir=value` → `CM_MLPERF_OUTPUT_DIR=value` -* `--performance_sample_count=value` → `CM_MLPERF_LOADGEN_PERFORMANCE_SAMPLE_COUNT=value` -* `--rerun=value` → `CM_RERUN=value` -* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` -* `--server_target_qps=value` → `CM_MLPERF_LOADGEN_SERVER_TARGET_QPS=value` -* `--singlestream_target_latency=value` → `CM_MLPERF_LOADGEN_SINGLESTREAM_TARGET_LATENCY=value` -* `--skip_preprocess=value` → `CM_SKIP_PREPROCESS_DATASET=value` -* `--skip_preprocessing=value` → `CM_SKIP_PREPROCESS_DATASET=value` -* `--target_latency=value` → `CM_MLPERF_LOADGEN_TARGET_LATENCY=value` -* `--target_qps=value` → `CM_MLPERF_LOADGEN_TARGET_QPS=value` -* `--user_conf=value` → `CM_MLPERF_USER_CONF=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "count":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_BATCH_COUNT: `1` -* CM_BATCH_SIZE: `1` -* CM_FAST_COMPILATION: `yes` -* CM_MLPERF_LOADGEN_SCENARIO: `Offline` -* CM_MLPERF_LOADGEN_MODE: `performance` -* CM_SKIP_PREPROCESS_DATASET: `no` -* CM_SKIP_MODEL_DOWNLOAD: `no` -* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `kilt` -* CM_MLPERF_SKIP_RUN: `no` -* CM_KILT_REPO_URL: `https://github.com/GATEOverflow/kilt-mlperf` -* CM_QAIC_DEVICES: `0` -* kilt_max_wait_abs: `10000` -* verbosity: `0` -* loadgen_trigger_cold_run: `0` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-qualcomm/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,git,repo - * CM names: `--adr.['kilt-repo']...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - * get,mlcommons,inference,src - * CM names: `--adr.['inference-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * get,mlcommons,inference,loadgen - * CM names: `--adr.['inference-loadgen']...` - - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) - * generate,user-conf,mlperf,inference - * CM names: `--adr.['user-conf-generator']...` - - CM script: [generate-mlperf-inference-user-conf](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-user-conf) - * get,generic-python-lib,_mlperf_logging - * CM names: `--adr.['mlperf-logging']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,ml-model,resnet50,_fp32,_onnx,_from-tf - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['resnet50']}` - * Skip this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_DEVICE': ['qaic']}` - * CM names: `--adr.['resnet50-model', 'ml-model']...` - - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) - * compile,qaic,model,_resnet50 - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['resnet50'], 'CM_MLPERF_DEVICE': ['qaic']}` - * Skip this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_SKIP_RUN': [True]}` - * CM names: `--adr.['qaic-model-compiler', 'resnet50-compiler']...` - - CM script: [compile-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-model-for.qaic) - * get,dataset,imagenet,preprocessed,_for.resnet50,_NHWC,_full - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['resnet50']}` - * Skip this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_SKIP_RUN': [True]}` - * CM names: `--adr.['imagenet-preprocessed', 'dataset-preprocessed']...` - - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) - * get,squad-vocab - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['bert-99', 'bert-99.9']}` - * Skip this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_SKIP_RUN': [True]}` - * CM names: `--adr.['bert-vocab']...` - - CM script: [get-dataset-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad-vocab) - * get,dataset,tokenized,squad,_raw - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['bert-99', 'bert-99.9']}` - * Skip this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_SKIP_RUN': [True]}` - * CM names: `--adr.['squad-tokenized']...` - - CM script: [get-preprocessed-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-squad) - * compile,qaic,model,_retinanet - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['retinanet'], 'CM_MLPERF_DEVICE': ['qaic']}` - * Skip this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_SKIP_RUN': [True]}` - * CM names: `--adr.['qaic-model-compiler', 'retinanet-compiler']...` - - CM script: [compile-model-for.qaic](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-model-for.qaic) - * get,dataset,preprocessed,openimages,_for.retinanet.onnx,_NCHW,_validation,_custom-annotations - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['retinanet']}` - * Skip this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_SKIP_RUN': [True]}` - * CM names: `--adr.['openimages-preprocessed', 'dataset-preprocessed']...` - - CM script: [get-preprocessed-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openimages) - * get,lib,onnxruntime,lang-cpp,_cpu - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['onnxruntime'], 'CM_MLPERF_DEVICE': ['cpu']}` - - CM script: [get-onnxruntime-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-onnxruntime-prebuilt) - * get,lib,onnxruntime,lang-cpp,_cuda - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['onnxruntime'], 'CM_MLPERF_DEVICE': ['gpu']}` - - CM script: [get-onnxruntime-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-onnxruntime-prebuilt) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-qualcomm/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-qualcomm/_cm.yaml) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-qualcomm/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-qualcomm/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-qualcomm/customize.py)*** - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-qualcomm/_cm.yaml)*** - * compile,cpp-program - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_MLPERF_SKIP_RUN': [True]}` - * CM names: `--adr.['compile-program']...` - - CM script: [compile-program](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-program) - * benchmark-mlperf - * Skip this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_SKIP_RUN': ['yes', True]}` - * CM names: `--adr.['runner', 'mlperf-runner']...` - - CM script: [benchmark-program-mlperf](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program-mlperf) - * save,mlperf,inference,state - * CM names: `--adr.['save-mlperf-inference-state']...` - - CM script: [save-mlperf-inference-implementation-state](https://github.com/mlcommons/cm4mlops/tree/master/script/save-mlperf-inference-implementation-state) - -___ -### Script output -`cmr "reproduce mlcommons mlperf inference harness qualcomm-harness qualcomm kilt-harness kilt [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_DATASET_*` -* `CM_HW_NAME` -* `CM_IMAGENET_ACCURACY_DTYPE` -* `CM_MAX_EXAMPLES` -* `CM_MLPERF_*` -* `CM_ML_MODEL_*` -* `CM_SQUAD_ACCURACY_DTYPE` -#### New environment keys auto-detected from customize - -* `CM_DATASET_LIST` -* `CM_MLPERF_CONF` -* `CM_MLPERF_DEVICE` -* `CM_MLPERF_USER_CONF` \ No newline at end of file diff --git a/docs/Modular-MLPerf-benchmarks/index.md b/docs/Modular-MLPerf-benchmarks/index.md new file mode 100644 index 000000000..d599bdf46 --- /dev/null +++ b/docs/Modular-MLPerf-benchmarks/index.md @@ -0,0 +1,5 @@ +The Modular MLPerf benchmarks category contains the following scripts: + +- [app-mlperf-inference-dummy](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/app-mlperf-inference-dummy/README.md) +- [app-mlperf-inference-intel](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/app-mlperf-inference-intel/README.md) +- [app-mlperf-inference-qualcomm](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/app-mlperf-inference-qualcomm/README.md) diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/app-loadgen-generic-python/index.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-loadgen-generic-python/index.md deleted file mode 100644 index d4b87036e..000000000 --- 
a/docs/Modular-MLPerf-inference-benchmark-pipeline/app-loadgen-generic-python/index.md +++ /dev/null @@ -1,331 +0,0 @@ -Automatically generated README for this automation recipe: **app-loadgen-generic-python** - -Category: **Modular MLPerf inference benchmark pipeline** - -License: **Apache 2.0** - -Developers: [Gaz Iqbal](https://www.linkedin.com/in/gaziqbal), [Arjun Suresh](https://www.linkedin.com/in/arjunsuresh), [Grigori Fursin](https://cKnowledge.org/gfursin) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-loadgen-generic-python,d3d949cc361747a6) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *python,app,generic,loadgen* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "python app generic loadgen" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=python,app,generic,loadgen` - -`cm run script --tags=python,app,generic,loadgen[,variations] [--input_flags]` - -*or* - -`cmr "python app generic loadgen"` - -`cmr "python app generic loadgen [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - - -#### Input Flags - -* --**modelpath**=Full path to file with model weights -* --**modelcodepath**=(for PyTorch models) Full path to file with model code and cmc.py -* --**modelcfgpath**=(for PyTorch models) Full path to JSON file with model cfg -* --**modelsamplepath**=(for PyTorch models) Full path to file with model sample in pickle format -* --**ep**=ONNX Execution provider -* --**scenario**=MLPerf LoadGen scenario -* --**samples**=Number of samples (*2*) -* --**runner**=MLPerf runner -* --**execmode**=MLPerf exec mode -* --**output_dir**=MLPerf output directory -* --**concurrency**=MLPerf concurrency -* --**intraop**=MLPerf intra op threads -* --**interop**=MLPerf inter op threads - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "modelpath":...} -``` -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'python,app,generic,loadgen' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="python,app,generic,loadgen"``` - -#### Run this script via Docker (beta) - -`cm docker script "python app generic loadgen[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_cmc` - - Environment variables: - - *CM_CUSTOM_MODEL_CMC*: `True` - - Workflow: - * `_custom,cmc` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,ml-model,cmc - - CM script: [get-ml-model-abtf-ssd-pytorch](https://github.com/mlcommons/cm4abtf/tree/master/script/get-ml-model-abtf-ssd-pytorch) - * `_custom,huggingface` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,ml-model,huggingface - * CM names: `--adr.['hf-downloader']...` - - CM script: [get-ml-model-huggingface-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-huggingface-zoo) - * `_huggingface` - - Environment variables: - - *CM_CUSTOM_MODEL_SOURCE*: `huggingface` - - Workflow: - * `_model-stub.#` - - Environment variables: - - *CM_ML_MODEL_STUB*: `#` - - Workflow: - -
    - - - * Group "**backend**" -
    - Click here to expand this section. - - * **`_onnxruntime`** (default) - - Environment variables: - - *CM_MLPERF_BACKEND*: `onnxruntime` - - Workflow: - * `_pytorch` - - Environment variables: - - *CM_MLPERF_BACKEND*: `pytorch` - - Workflow: - -
    - - - * Group "**device**" -
    - Click here to expand this section. - - * **`_cpu`** (default) - - Environment variables: - - *CM_MLPERF_DEVICE*: `cpu` - - *CM_MLPERF_EXECUTION_PROVIDER*: `CPUExecutionProvider` - - Workflow: - * `_cuda` - - Environment variables: - - *CM_MLPERF_DEVICE*: `gpu` - - *CM_MLPERF_EXECUTION_PROVIDER*: `CUDAExecutionProvider` - - Workflow: - -
    - - - * Group "**models**" -
    - Click here to expand this section. - - * `_custom` - - Environment variables: - - *CM_MODEL*: `custom` - - Workflow: - * `_resnet50` - - Environment variables: - - *CM_MODEL*: `resnet50` - - Workflow: - * `_retinanet` - - Environment variables: - - *CM_MODEL*: `retinanet` - - Workflow: - -
    - - -#### Default variations - -`_cpu,_onnxruntime` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--concurrency=value` → `CM_MLPERF_CONCURRENCY=value` -* `--ep=value` → `CM_MLPERF_EXECUTION_PROVIDER=value` -* `--execmode=value` → `CM_MLPERF_EXEC_MODE=value` -* `--interop=value` → `CM_MLPERF_INTEROP=value` -* `--intraop=value` → `CM_MLPERF_INTRAOP=value` -* `--loadgen_duration_sec=value` → `CM_MLPERF_LOADGEN_DURATION_SEC=value` -* `--loadgen_expected_qps=value` → `CM_MLPERF_LOADGEN_EXPECTED_QPS=value` -* `--modelcfg=value` → `CM_ML_MODEL_CFG=value` -* `--modelcfgpath=value` → `CM_ML_MODEL_CFG_WITH_PATH=value` -* `--modelcodepath=value` → `CM_ML_MODEL_CODE_WITH_PATH=value` -* `--modelpath=value` → `CM_ML_MODEL_FILE_WITH_PATH=value` -* `--modelsamplepath=value` → `CM_ML_MODEL_SAMPLE_WITH_PATH=value` -* `--output_dir=value` → `CM_MLPERF_OUTPUT_DIR=value` -* `--runner=value` → `CM_MLPERF_RUNNER=value` -* `--samples=value` → `CM_MLPERF_LOADGEN_SAMPLES=value` -* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "concurrency":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_MLPERF_EXECUTION_MODE: `parallel` -* CM_MLPERF_BACKEND: `onnxruntime` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,generic-python-lib,_psutil - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.numpy - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,cuda - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_DEVICE': ['gpu']}` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * get,loadgen - * CM names: `--adr.['loadgen']...` - - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) - * get,generic-python-lib,_onnxruntime - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['onnxruntime'], 'CM_MLPERF_DEVICE': ['cpu']}` - * CM names: `--adr.['onnxruntime']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_onnxruntime_gpu - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['onnxruntime'], 'CM_MLPERF_DEVICE': ['gpu']}` - * CM names: `--adr.['onnxruntime']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_onnx - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['onnxruntime']}` - * CM names: `--adr.['onnx']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torch - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['pytorch'], 'CM_MLPERF_DEVICE': ['cpu']}` - * CM names: `--adr.['torch']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torchvision - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['pytorch'], 'CM_MLPERF_DEVICE': ['cpu']}` - * CM names: `--adr.['torchvision']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torch_cuda - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['pytorch'], 'CM_MLPERF_DEVICE': ['gpu']}` - * CM names: `--adr.['torch']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torchvision_cuda - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['pytorch'], 'CM_MLPERF_DEVICE': ['gpu']}` - * CM names: `--adr.['torchvision']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,ml-model,resnet50,_onnx - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['resnet50']}` - - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) - * get,ml-model,retinanet,_onnx,_fp32 - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['retinanet']}` - - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) - * get,ml-model,retinanet,_onnx,_fp32 - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['retinanet']}` - - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python/_cm.yaml) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-loadgen-generic-python/_cm.yaml) - -___ -### Script output -`cmr "python app generic loadgen [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_MLPERF_*` -#### New environment keys auto-detected from customize diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-ctuning-cpp-tflite/index.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-ctuning-cpp-tflite/index.md deleted file mode 100644 index dce3fd1b7..000000000 --- a/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-ctuning-cpp-tflite/index.md +++ /dev/null @@ -1,382 +0,0 @@ -Automatically generated README for this automation recipe: **app-mlperf-inference-ctuning-cpp-tflite** - -Category: **Modular MLPerf inference benchmark pipeline** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-inference-ctuning-cpp-tflite,415904407cca404a) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-ctuning-cpp-tflite)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *app,mlperf,inference,tflite-cpp* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "app mlperf inference tflite-cpp" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=app,mlperf,inference,tflite-cpp` - -`cm run script --tags=app,mlperf,inference,tflite-cpp[,variations] [--input_flags]` - -*or* - -`cmr "app mlperf inference tflite-cpp"` - -`cmr "app mlperf inference tflite-cpp [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'app,mlperf,inference,tflite-cpp' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="app,mlperf,inference,tflite-cpp"``` - -#### Run this script via Docker (beta) - -`cm docker script "app mlperf inference tflite-cpp[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_armnn` - - Environment variables: - - *CM_MLPERF_TFLITE_USE_ARMNN*: `yes` - - *CM_TMP_LINK_LIBS*: `tensorflowlite,armnn` - - Workflow: - * `_armnn,tflite` - - Environment variables: - - *CM_MLPERF_BACKEND*: `armnn_tflite` - - *CM_MLPERF_BACKEND_VERSION*: `<<>>` - - *CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX*: `tflite_armnn_cpp` - - *CM_TMP_LINK_LIBS*: `tensorflowlite,armnn,armnnTfLiteParser` - - *CM_TMP_SRC_FOLDER*: `armnn` - - Workflow: - -
    - - - * Group "**backend**" -
    - Click here to expand this section. - - * `_tf` - - Environment variables: - - *CM_MLPERF_BACKEND*: `tf` - - Workflow: - * **`_tflite`** (default) - - Environment variables: - - *CM_MLPERF_BACKEND*: `tflite` - - *CM_MLPERF_BACKEND_VERSION*: `master` - - *CM_TMP_LINK_LIBS*: `tensorflowlite` - - *CM_TMP_SRC_FOLDER*: `src` - - Workflow: - -
    - - - * Group "**device**" -
    - Click here to expand this section. - - * **`_cpu`** (default) - - Environment variables: - - *CM_MLPERF_DEVICE*: `cpu` - - Workflow: - * `_gpu` - - Environment variables: - - *CM_MLPERF_DEVICE*: `gpu` - - *CM_MLPERF_DEVICE_LIB_NAMESPEC*: `cudart` - - Workflow: - -
    - - - * Group "**loadgen-scenario**" -
    - Click here to expand this section. - - * **`_singlestream`** (default) - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `SingleStream` - - Workflow: - -
    - - - * Group "**model**" -
    - Click here to expand this section. - - * `_efficientnet` - - Environment variables: - - *CM_MODEL*: `efficientnet` - - Workflow: - * `_mobilenet` - - Environment variables: - - *CM_MODEL*: `mobilenet` - - Workflow: - * **`_resnet50`** (default) - - Environment variables: - - *CM_MODEL*: `resnet50` - - Workflow: - -
    - - - * Group "**optimization-target**" -
    - Click here to expand this section. - - * `_use-neon` - - Environment variables: - - *CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX1*: `using_neon` - - *CM_MLPERF_TFLITE_USE_NEON*: `1` - - Workflow: - * `_use-opencl` - - Environment variables: - - *CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX1*: `using_opencl` - - *CM_MLPERF_TFLITE_USE_OPENCL*: `1` - - Workflow: - -
    - - - * Group "**precision**" -
    - Click here to expand this section. - - * **`_fp32`** (default) - - Environment variables: - - *CM_MLPERF_MODEL_PRECISION*: `float32` - - Workflow: - * `_int8` - - Environment variables: - - *CM_DATASET_COMPRESSED*: `on` - - *CM_MLPERF_MODEL_PRECISION*: `int8` - - Workflow: - * `_uint8` - - Environment variables: - - *CM_DATASET_COMPRESSED*: `on` - - *CM_MLPERF_MODEL_PRECISION*: `uint8` - - Workflow: - -
    - - -#### Default variations - -`_cpu,_fp32,_resnet50,_singlestream,_tflite` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--compressed_dataset=value` → `CM_DATASET_COMPRESSED=value` -* `--count=value` → `CM_MLPERF_LOADGEN_QUERY_COUNT=value` -* `--mlperf_conf=value` → `CM_MLPERF_CONF=value` -* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` -* `--output_dir=value` → `CM_MLPERF_OUTPUT_DIR=value` -* `--performance_sample_count=value` → `CM_MLPERF_LOADGEN_PERFORMANCE_SAMPLE_COUNT=value` -* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` -* `--user_conf=value` → `CM_MLPERF_USER_CONF=value` -* `--verbose=value` → `CM_VERBOSE=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "compressed_dataset":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_DATASET_COMPRESSED: `off` -* CM_DATASET_INPUT_SQUARE_SIDE: `224` -* CM_FAST_COMPILATION: `yes` -* CM_LOADGEN_BUFFER_SIZE: `1024` -* CM_MLPERF_LOADGEN_MODE: `accuracy` -* CM_MLPERF_LOADGEN_SCENARIO: `SingleStream` -* CM_MLPERF_LOADGEN_TRIGGER_COLD_RUN: `0` -* CM_MLPERF_OUTPUT_DIR: `.` -* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `tflite_cpp` -* CM_MLPERF_TFLITE_USE_NEON: `0` -* CM_MLPERF_TFLITE_USE_OPENCL: `0` -* CM_ML_MODEL_GIVEN_CHANNEL_MEANS: `123.68 116.78 103.94` -* CM_ML_MODEL_NORMALIZE_DATA: `0` -* CM_ML_MODEL_SUBTRACT_MEANS: `1` -* CM_VERBOSE: `0` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-ctuning-cpp-tflite/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,cuda - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_DEVICE': ['gpu']}` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * get,loadgen - * CM names: `--adr.['loadgen']...` - - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) - * get,mlcommons,inference,src - * CM names: `--adr.['inference-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * get,ml-model,mobilenet,raw,_tflite - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['tflite', 'armnn_tflite'], 'CM_MODEL': ['mobilenet']}` - * CM names: `--adr.['ml-model', 'tflite-model', 'mobilenet-model']...` - - CM script: [get-ml-model-mobilenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-mobilenet) - * get,ml-model,resnet50,raw,_tflite,_no-argmax - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['tflite', 'armnn_tflite'], 'CM_MODEL': ['resnet50']}` - * CM names: `--adr.['ml-model', 'tflite-model', 'resnet50-model']...` - - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) - * get,ml-model,resnet50,raw,_tf - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['tf'], 'CM_MODEL': ['resnet50']}` - * CM names: `--adr.['ml-model', 'tflite-model', 'resnet50-model']...` - - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) - * get,ml-model,efficientnet,raw,_tflite - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['tflite', 'armnn_tflite'], 'CM_MODEL': ['efficientnet']}` - * CM names: `--adr.['ml-model', 'tflite-model', 'efficientnet-model']...` - - CM script: [get-ml-model-efficientnet-lite](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-efficientnet-lite) - * get,tensorflow,lib,_tflite - - CM script: [install-tensorflow-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-tensorflow-from-src) - * get,lib,armnn - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_TFLITE_USE_ARMNN': ['yes']}` - * CM names: `--adr.['armnn', 'lib-armnn']...` - - CM script: [get-lib-armnn](https://github.com/mlcommons/cm4mlops/tree/master/script/get-lib-armnn) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-ctuning-cpp-tflite/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-ctuning-cpp-tflite/_cm.json)*** - * generate,user-conf,mlperf,inference - * CM names: `--adr.['user-conf-generator']...` - - CM script: [generate-mlperf-inference-user-conf](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-user-conf) - * get,dataset,preprocessed,imagenet,_for.resnet50,_rgb32,_NHWC - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_SKIP_RUN': ['no'], 'CM_MODEL': ['resnet50']}` - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_DATASET_COMPRESSED': ['on']}` - * CM names: `--adr.['imagenet-preprocessed', 'preprocessed-dataset']...` - - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) - * get,dataset,preprocessed,imagenet,_for.mobilenet,_rgb32,_NHWC - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_SKIP_RUN': ['no'], 'CM_MODEL': ['mobilenet', 'efficientnet']}` - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_DATASET_COMPRESSED': ['on']}` - * CM names: `--adr.['imagenet-preprocessed', 'preprocessed-dataset']...` - - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) - * get,dataset,preprocessed,imagenet,_for.mobilenet,_rgb8,_NHWC - * Enable this dependency only if all ENV vars are set:
    -`{'CM_DATASET_COMPRESSED': ['on'], 'CM_MLPERF_SKIP_RUN': ['no'], 'CM_MODEL': ['mobilenet', 'efficientnet']}` - * CM names: `--adr.['imagenet-preprocessed', 'preprocessed-dataset']...` - - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) - * get,dataset,preprocessed,imagenet,_for.resnet50,_rgb8,_NHWC - * Enable this dependency only if all ENV vars are set:
    -`{'CM_DATASET_COMPRESSED': ['on'], 'CM_MLPERF_SKIP_RUN': ['no'], 'CM_MODEL': ['resnet50']}` - * CM names: `--adr.['imagenet-preprocessed', 'preprocessed-dataset']...` - - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-ctuning-cpp-tflite/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-ctuning-cpp-tflite/customize.py)*** - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-ctuning-cpp-tflite/_cm.json)*** - * compile,program - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_MLPERF_SKIP_RUN': ['yes']}` - * CM names: `--adr.['compiler-program']...` - - CM script: [compile-program](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-program) - * benchmark-mlperf - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_MLPERF_SKIP_RUN': ['yes']}` - * CM names: `--adr.['mlperf-runner']...` - - CM script: [benchmark-program-mlperf](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program-mlperf) - * save,mlperf,inference,state - * CM names: `--adr.['save-mlperf-inference-state']...` - - CM script: [save-mlperf-inference-implementation-state](https://github.com/mlcommons/cm4mlops/tree/master/script/save-mlperf-inference-implementation-state) - -___ -### Script output -`cmr "app mlperf inference tflite-cpp [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_HW_NAME` -* `CM_MLPERF_*` -* `CM_ML_MODEL_*` -#### New environment keys auto-detected from customize - -* `CM_MLPERF_CONF` -* `CM_MLPERF_DEVICE` -* `CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX2` -* `CM_MLPERF_USER_CONF` \ No newline at end of file diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-cpp/index.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-cpp/index.md deleted file mode 100644 index 35b59a51b..000000000 --- a/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-cpp/index.md +++ /dev/null @@ -1,336 +0,0 @@ -Automatically generated README for this automation recipe: **app-mlperf-inference-mlcommons-cpp** - -Category: **Modular MLPerf inference benchmark pipeline** - -License: **Apache 2.0** - -Developers: [Thomas Zhu](https://www.linkedin.com/in/hanwen-zhu-483614189), [Arjun Suresh](https://www.linkedin.com/in/arjunsuresh), [Grigori Fursin](https://cKnowledge.org/gfursin) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-inference-mlcommons-cpp,bf62405e6c7a44bf) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-cpp)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *app,mlcommons,mlperf,inference,cpp* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "app mlcommons mlperf inference cpp" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=app,mlcommons,mlperf,inference,cpp` - -`cm run script --tags=app,mlcommons,mlperf,inference,cpp[,variations] [--input_flags]` - -*or* - -`cmr "app mlcommons mlperf inference cpp"` - -`cmr "app mlcommons mlperf inference cpp [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'app,mlcommons,mlperf,inference,cpp' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="app,mlcommons,mlperf,inference,cpp"``` - -#### Run this script via Docker (beta) - -`cm docker script "app mlcommons mlperf inference cpp[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_multistream,resnet50` - - Workflow: - * `_multistream,retinanet` - - Workflow: - * `_offline,resnet50` - - Workflow: - * `_resnet50,multistream` - - Workflow: - * `_resnet50,offline` - - Workflow: - * `_resnet50,server` - - Workflow: - -
    - - - * Group "**batch-size**" -
    - Click here to expand this section. - - * `_batch-size.#` - - Environment variables: - - *CM_MLPERF_LOADGEN_MAX_BATCHSIZE*: `#` - - Workflow: - -
    - - - * Group "**device**" -
    - Click here to expand this section. - - * **`_cpu`** (default) - - Environment variables: - - *CM_MLPERF_DEVICE*: `cpu` - - Workflow: - * `_cuda` - - Environment variables: - - *CM_MLPERF_DEVICE*: `gpu` - - *CM_MLPERF_DEVICE_LIB_NAMESPEC*: `cudart` - - Workflow: - -
    - - - * Group "**framework**" -
    - Click here to expand this section. - - * **`_onnxruntime`** (default) - - Environment variables: - - *CM_MLPERF_BACKEND*: `onnxruntime` - - *CM_MLPERF_BACKEND_LIB_NAMESPEC*: `onnxruntime` - - Workflow: - * `_pytorch` - - Environment variables: - - *CM_MLPERF_BACKEND*: `pytorch` - - Workflow: - * `_tf` - - Environment variables: - - *CM_MLPERF_BACKEND*: `tf` - - Workflow: - * `_tflite` - - Environment variables: - - *CM_MLPERF_BACKEND*: `tflite` - - Workflow: - * `_tvm-onnx` - - Environment variables: - - *CM_MLPERF_BACKEND*: `tvm-onnx` - - Workflow: - -
    - - - * Group "**loadgen-scenario**" -
    - Click here to expand this section. - - * `_multistream` - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `MultiStream` - - Workflow: - * **`_offline`** (default) - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `Offline` - - Workflow: - * `_server` - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `Server` - - Workflow: - * `_singlestream` - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `SingleStream` - - *CM_MLPERF_LOADGEN_MAX_BATCHSIZE*: `1` - - Workflow: - -
    - - - * Group "**model**" -
    - Click here to expand this section. - - * **`_resnet50`** (default) - - Environment variables: - - *CM_MODEL*: `resnet50` - - Workflow: - * `_retinanet` - - Environment variables: - - *CM_MODEL*: `retinanet` - - Workflow: - -
    - - -#### Default variations - -`_cpu,_offline,_onnxruntime,_resnet50` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--count=value` → `CM_MLPERF_LOADGEN_QUERY_COUNT=value` -* `--max_batchsize=value` → `CM_MLPERF_LOADGEN_MAX_BATCHSIZE=value` -* `--mlperf_conf=value` → `CM_MLPERF_CONF=value` -* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` -* `--output_dir=value` → `CM_MLPERF_OUTPUT_DIR=value` -* `--performance_sample_count=value` → `CM_MLPERF_LOADGEN_PERFORMANCE_SAMPLE_COUNT=value` -* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` -* `--user_conf=value` → `CM_MLPERF_USER_CONF=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "count":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_BATCH_COUNT: `1` -* CM_BATCH_SIZE: `1` -* CM_FAST_COMPILATION: `yes` -* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `cpp` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-cpp/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,cuda,_cudnn - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_DEVICE': ['gpu']}` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * get,loadgen - * CM names: `--adr.['loadgen']...` - - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) - * get,mlcommons,inference,src - * CM names: `--adr.['inference-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * get,lib,onnxruntime,lang-cpp,_cpu - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['onnxruntime'], 'CM_MLPERF_DEVICE': ['cpu']}` - - CM script: [get-onnxruntime-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-onnxruntime-prebuilt) - * get,lib,onnxruntime,lang-cpp,_cuda - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['onnxruntime'], 'CM_MLPERF_DEVICE': ['gpu']}` - - CM script: [get-onnxruntime-prebuilt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-onnxruntime-prebuilt) - * get,dataset,preprocessed,imagenet,_NCHW - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['resnet50']}` - * CM names: `--adr.['imagenet-preprocessed']...` - - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) - * get,ml-model,raw,resnet50,_onnx - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['resnet50']}` - - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) - * get,dataset,preprocessed,openimages,_validation,_NCHW - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['retinanet']}` - * CM names: `--adr.['openimages-preprocessed']...` - - CM script: [get-preprocessed-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openimages) - * get,ml-model,retinanet,_onnx,_fp32 - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['retinanet']}` - - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) - * generate,user-conf,mlperf,inference - * CM names: `--adr.['user-conf-generator']...` - - CM script: [generate-mlperf-inference-user-conf](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-user-conf) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-cpp/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-cpp/_cm.yaml) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-cpp/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-cpp/customize.py)*** - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-cpp/_cm.yaml)*** - * compile,cpp-program - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_MLPERF_SKIP_RUN': ['yes']}` - * CM names: `--adr.['compile-program']...` - - CM script: [compile-program](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-program) - * benchmark-mlperf - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_MLPERF_SKIP_RUN': ['yes']}` - * CM names: `--adr.['mlperf-runner']...` - - CM script: [benchmark-program-mlperf](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program-mlperf) - * save,mlperf,inference,state - * CM names: `--adr.['save-mlperf-inference-state']...` - - CM script: [save-mlperf-inference-implementation-state](https://github.com/mlcommons/cm4mlops/tree/master/script/save-mlperf-inference-implementation-state) - -___ -### Script output -`cmr "app mlcommons mlperf inference cpp [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_DATASET_*` -* `CM_HW_NAME` -* `CM_MLPERF_*` -* `CM_ML_MODEL_*` -#### New environment keys auto-detected from customize - -* `CM_DATASET_LIST` -* `CM_MLPERF_CONF` -* `CM_MLPERF_DEVICE` -* `CM_MLPERF_USER_CONF` \ No newline at end of file diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-python/index.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-python/index.md deleted file mode 100644 index d8e825fc8..000000000 --- a/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-python/index.md +++ /dev/null @@ -1,944 +0,0 @@ -Automatically generated README for this automation recipe: **app-mlperf-inference-mlcommons-python** - -Category: **Modular MLPerf inference benchmark pipeline** - -License: **Apache 2.0** - -Developers: [Arjun Suresh](https://www.linkedin.com/in/arjunsuresh), [Thomas Zhu](https://www.linkedin.com/in/hanwen-zhu-483614189), [Grigori Fursin](https://cKnowledge.org/gfursin) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-inference-mlcommons-python,ff149e9781fc4b65) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- - -This portable CM script is being developed by the [MLCommons taskforce on automation and 
reproducibility](https://github.com/mlcommons/ck/blob/master/docs/mlperf-education-workgroup.md) -to modularize the *python reference implementations* of the [MLPerf inference benchmark](https://github.com/mlcommons/inference) -using the [MLCommons CM automation meta-framework](https://github.com/mlcommons/ck). -The goal is to make it easier to run, optimize and reproduce MLPerf benchmarks -across diverse platforms with continuously changing software and hardware. - -See the current coverage of different models, devices and backends [here](README-extra.md#current-coverage). - - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-python)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *app,vision,language,mlcommons,mlperf,inference,reference,ref* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "app vision language mlcommons mlperf inference reference ref" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=app,vision,language,mlcommons,mlperf,inference,reference,ref` - -`cm run script --tags=app,vision,language,mlcommons,mlperf,inference,reference,ref[,variations] [--input_flags]` - -*or* - -`cmr "app vision language mlcommons mlperf inference reference ref"` - -`cmr "app vision language mlcommons mlperf inference reference ref [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'app,vision,language,mlcommons,mlperf,inference,reference,ref' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="app,vision,language,mlcommons,mlperf,inference,reference,ref"``` - -#### Run this script via Docker (beta) - -`cm docker script "app vision language mlcommons mlperf inference reference ref[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *Internal group (variations should not be selected manually)* -
    - Click here to expand this section. - - * `_gptj_` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_torch - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.datasets - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.attrs - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.accelerate - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_llama2-70b_` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_package.transformers - * CM names: `--adr.['transformers']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.datasets - * CM names: `--adr.['datasets']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.sentencepiece - * CM names: `--adr.['sentencepiece']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.protobuf - * CM names: `--adr.['protobuf']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.accelerate - * CM names: `--adr.['accelerate']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.absl-py - * CM names: `--adr.['absl-py']...` - - CM script: 
[get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.evaluate - * CM names: `--adr.['evaluate']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.nltk - * CM names: `--adr.['nltk']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.rouge-score - * CM names: `--adr.['rouge-score']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - -
    - - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_3d-unet` - - Environment variables: - - *CM_TMP_IGNORE_MLPERF_QUERY_COUNT*: `True` - - *CM_MLPERF_MODEL_SKIP_BATCHING*: `True` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_package.nibabel - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_beam_size.#` - - Environment variables: - - *GPTJ_BEAM_SIZE*: `#` - - Workflow: - * `_bert` - - Environment variables: - - *CM_MLPERF_MODEL_SKIP_BATCHING*: `True` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_package.pydantic - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_tokenization - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_six - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.absl-py - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_protobuf - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['tf', 'tflite']}` - * CM names: `--adr.['protobuf']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_boto3 - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['pytorch']}` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torch - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_MLPERF_DEVICE': ['gpu']}` - * CM names: `--adr.['ml-engine-pytorch', 'pytorch']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_dlrm` - - Environment variables: - - *CM_MLPERF_MODEL_SKIP_BATCHING*: `True` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,dlrm,src - * CM names: `--adr.['dlrm-src']...` - - CM script: [get-dlrm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dlrm) - * get,generic-python-lib,_mlperf_logging - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_opencv-python - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_tensorboard - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_protobuf - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_scikit-learn - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_tqdm - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_onnx - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_numpy - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.torchrec - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.pyre-extensions - - CM 
script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.torchsnapshot - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_llama2-70b_,cuda` - - Workflow: - * `_multistream` - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `MultiStream` - - Workflow: - * `_offline` - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `Offline` - - Workflow: - * `_onnxruntime,cpu` - - Environment variables: - - *CM_MLPERF_BACKEND_VERSION*: `<<>>` - - Workflow: - * `_onnxruntime,cuda` - - Environment variables: - - *CM_MLPERF_BACKEND_VERSION*: `<<>>` - - *ONNXRUNTIME_PREFERRED_EXECUTION_PROVIDER*: `CUDAExecutionProvider` - - Workflow: - * `_onnxruntime,rocm` - - Environment variables: - - *ONNXRUNTIME_PREFERRED_EXECUTION_PROVIDER*: `ROCMExecutionProvider` - - *CM_MLPERF_BACKEND_VERSION*: `<<>>` - - Workflow: - * `_pytorch,rocm` - - Workflow: - * `_r2.1_default` - - Environment variables: - - *CM_RERUN*: `yes` - - *CM_SKIP_SYS_UTILS*: `yes` - - *CM_TEST_QUERY_COUNT*: `100` - - Workflow: - * `_server` - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `Server` - - Workflow: - * `_singlestream` - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `SingleStream` - - Workflow: - * `_tf,rocm` - - Environment variables: - - *CM_MLPERF_BACKEND_VERSION*: `<<>>` - - Workflow: - * `_tpu,tflite` - - Workflow: - -
    - - - * Group "**batch-size**" -
    - Click here to expand this section. - - * `_batch_size.#` - - Environment variables: - - *CM_MLPERF_LOADGEN_MAX_BATCHSIZE*: `#` - - Workflow: - -
    - - - * Group "**device**" -
    - Click here to expand this section. - - * **`_cpu`** (default) - - Environment variables: - - *CM_MLPERF_DEVICE*: `cpu` - - *CUDA_VISIBLE_DEVICES*: `` - - *USE_CUDA*: `False` - - *USE_GPU*: `False` - - Workflow: - * `_cuda` - - Environment variables: - - *CM_MLPERF_DEVICE*: `gpu` - - *USE_CUDA*: `True` - - *USE_GPU*: `True` - - Workflow: - * `_rocm` - - Environment variables: - - *CM_MLPERF_DEVICE*: `rocm` - - *USE_GPU*: `True` - - Workflow: - * `_tpu` - - Environment variables: - - *CM_MLPERF_DEVICE*: `tpu` - - Workflow: - -
    - - - * Group "**framework**" -
    - Click here to expand this section. - - * `_deepsparse` - - Environment variables: - - *CM_MLPERF_BACKEND*: `deepsparse` - - *CM_MLPERF_BACKEND_VERSION*: `<<>>` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_deepsparse - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_HOST_PLATFORM_FLAVOR': ['aarch64']}` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.deepsparse-nightly - * Enable this dependency only if all ENV vars are set:
    -`{'CM_HOST_PLATFORM_FLAVOR': ['aarch64']}` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_ncnn` - - Environment variables: - - *CM_MLPERF_BACKEND*: `ncnn` - - *CM_MLPERF_BACKEND_VERSION*: `<<>>` - - *CM_MLPERF_VISION_DATASET_OPTION*: `imagenet_pytorch` - - Workflow: - * **`_onnxruntime`** (default) - - Environment variables: - - *CM_MLPERF_BACKEND*: `onnxruntime` - - Workflow: - * `_pytorch` - - Environment variables: - - *CM_MLPERF_BACKEND*: `pytorch` - - *CM_MLPERF_BACKEND_VERSION*: `<<>>` - - Workflow: - * `_ray` - - Environment variables: - - *CM_MLPERF_BACKEND*: `ray` - - *CM_MLPERF_BACKEND_VERSION*: `<<>>` - - Workflow: - * `_tf` - - Aliases: `_tensorflow` - - Environment variables: - - *CM_MLPERF_BACKEND*: `tf` - - *CM_MLPERF_BACKEND_VERSION*: `<<>>` - - Workflow: - * `_tflite` - - Environment variables: - - *CM_MLPERF_BACKEND*: `tflite` - - *CM_MLPERF_BACKEND_VERSION*: `<<>>` - - *CM_MLPERF_VISION_DATASET_OPTION*: `imagenet_tflite_tpu` - - Workflow: - * `_tvm-onnx` - - Environment variables: - - *CM_MLPERF_BACKEND*: `tvm-onnx` - - *CM_MLPERF_BACKEND_VERSION*: `<<>>` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_onnx - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,tvm - * CM names: `--adr.['tvm']...` - - CM script: [get-tvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm) - * get,tvm-model,_onnx - * CM names: `--adr.['tvm-model']...` - - CM script: [get-tvm-model](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm-model) - * `_tvm-pytorch` - - Environment variables: - - *CM_MLPERF_BACKEND*: `tvm-pytorch` - - *CM_MLPERF_BACKEND_VERSION*: `<<>>` - - *CM_PREPROCESS_PYTORCH*: `yes` - - *MLPERF_TVM_TORCH_QUANTIZED_ENGINE*: `qnnpack` - - Workflow: - 1. 
***Read "deps" on other CM scripts*** - * get,generic-python-lib,_torch - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,tvm - * CM names: `--adr.['tvm']...` - - CM script: [get-tvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm) - * get,tvm-model,_pytorch - * CM names: `--adr.['tvm-model']...` - - CM script: [get-tvm-model](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm-model) - * `_tvm-tflite` - - Environment variables: - - *CM_MLPERF_BACKEND*: `tvm-tflite` - - *CM_MLPERF_BACKEND_VERSION*: `<<>>` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_tflite - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,tvm - * CM names: `--adr.['tvm']...` - - CM script: [get-tvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm) - * get,tvm-model,_tflite - * CM names: `--adr.['tvm-model']...` - - CM script: [get-tvm-model](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tvm-model) - -
    - - - * Group "**implementation**" -
    - Click here to expand this section. - - * **`_python`** (default) - - Environment variables: - - *CM_MLPERF_PYTHON*: `yes` - - *CM_MLPERF_IMPLEMENTATION*: `reference` - - Workflow: - -
    - - - * Group "**models**" -
    - Click here to expand this section. - - * `_3d-unet-99` - - Environment variables: - - *CM_MODEL*: `3d-unet-99` - - Workflow: - * `_3d-unet-99.9` - - Environment variables: - - *CM_MODEL*: `3d-unet-99.9` - - Workflow: - * `_bert-99` - - Environment variables: - - *CM_MODEL*: `bert-99` - - Workflow: - * `_bert-99.9` - - Environment variables: - - *CM_MODEL*: `bert-99.9` - - Workflow: - * `_dlrm-99` - - Environment variables: - - *CM_MODEL*: `dlrm-99` - - Workflow: - * `_dlrm-99.9` - - Environment variables: - - *CM_MODEL*: `dlrm-99.9` - - Workflow: - * `_gptj-99` - - Environment variables: - - *CM_MODEL*: `gptj-99` - - Workflow: - * `_gptj-99.9` - - Environment variables: - - *CM_MODEL*: `gptj-99.9` - - Workflow: - * `_llama2-70b-99` - - Environment variables: - - *CM_MODEL*: `llama2-70b-99` - - Workflow: - * `_llama2-70b-99.9` - - Environment variables: - - *CM_MODEL*: `llama2-70b-99.9` - - Workflow: - * **`_resnet50`** (default) - - Environment variables: - - *CM_MODEL*: `resnet50` - - *CM_MLPERF_USE_MLCOMMONS_RUN_SCRIPT*: `yes` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_opencv-python - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_numpy - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_pycocotools - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. ***Read "prehook_deps" on other CM scripts*** - * get,generic-python-lib,_protobuf - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['tf', 'tflite']}` - * CM names: `--adr.['protobuf']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_retinanet` - - Environment variables: - - *CM_MODEL*: `retinanet` - - *CM_MLPERF_USE_MLCOMMONS_RUN_SCRIPT*: `yes` - - *CM_MLPERF_LOADGEN_MAX_BATCHSIZE*: `1` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_opencv-python - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_numpy - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_pycocotools - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_rnnt` - - Environment variables: - - *CM_MODEL*: `rnnt` - - *CM_MLPERF_MODEL_SKIP_BATCHING*: `True` - - *CM_TMP_IGNORE_MLPERF_QUERY_COUNT*: `True` - - Workflow: - 1. 
***Read "deps" on other CM scripts*** - * get,generic-python-lib,_package.pydantic - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_librosa - * CM names: `--adr.['librosa']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_inflect - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_unidecode - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_toml - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_sdxl` - - Environment variables: - - *CM_MODEL*: `stable-diffusion-xl` - - *CM_NUM_THREADS*: `1` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_package.diffusers - * CM names: `--adr.['diffusers']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.transformers - * CM names: `--adr.['transformers']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.accelerate - * CM names: `--adr.['accelerate']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.torchmetrics - * CM names: `--adr.['torchmetrics']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.torch-fidelity - * CM names: `--adr.['torch-fidelity']...` - - CM script: 
[get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.open_clip_torch - * CM names: `--adr.['open-clip']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.opencv-python - * CM names: `--adr.['opencv-python']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.scipy - * CM names: `--adr.['scipy']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - -
    - - - * Group "**network**" -
    - Click here to expand this section. - - * `_network-lon` - - Environment variables: - - *CM_NETWORK_LOADGEN*: `lon` - - *CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX1*: `network_loadgen` - - Workflow: - * `_network-sut` - - Environment variables: - - *CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX1*: `network_sut` - - *CM_NETWORK_LOADGEN*: `sut` - - Workflow: - -
    - - - * Group "**precision**" -
    - Click here to expand this section. - - * `_bfloat16` - - Environment variables: - - *CM_MLPERF_QUANTIZATION*: `False` - - *CM_MLPERF_MODEL_PRECISION*: `bfloat16` - - Workflow: - * `_float16` - - Environment variables: - - *CM_MLPERF_QUANTIZATION*: `False` - - *CM_MLPERF_MODEL_PRECISION*: `float16` - - Workflow: - * **`_fp32`** (default) - - Environment variables: - - *CM_MLPERF_QUANTIZATION*: `False` - - *CM_MLPERF_MODEL_PRECISION*: `float32` - - Workflow: - * `_int8` - - Aliases: `_quantized` - - Environment variables: - - *CM_MLPERF_QUANTIZATION*: `True` - - *CM_MLPERF_MODEL_PRECISION*: `int8` - - Workflow: - -
    - - -#### Default variations - -`_cpu,_fp32,_onnxruntime,_python,_resnet50` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--clean=value` → `CM_MLPERF_CLEAN_SUBMISSION_DIR=value` -* `--count=value` → `CM_MLPERF_LOADGEN_QUERY_COUNT=value` -* `--dataset=value` → `CM_MLPERF_VISION_DATASET_OPTION=value` -* `--dataset_args=value` → `CM_MLPERF_EXTRA_DATASET_ARGS=value` -* `--docker=value` → `CM_RUN_DOCKER_CONTAINER=value` -* `--hw_name=value` → `CM_HW_NAME=value` -* `--imagenet_path=value` → `IMAGENET_PATH=value` -* `--max_amps=value` → `CM_MLPERF_POWER_MAX_AMPS=value` -* `--max_batchsize=value` → `CM_MLPERF_LOADGEN_MAX_BATCHSIZE=value` -* `--max_volts=value` → `CM_MLPERF_POWER_MAX_VOLTS=value` -* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` -* `--model=value` → `CM_MLPERF_CUSTOM_MODEL_PATH=value` -* `--multistream_target_latency=value` → `CM_MLPERF_LOADGEN_MULTISTREAM_TARGET_LATENCY=value` -* `--network=value` → `CM_NETWORK_LOADGEN=value` -* `--ntp_server=value` → `CM_MLPERF_POWER_NTP_SERVER=value` -* `--num_threads=value` → `CM_NUM_THREADS=value` -* `--offline_target_qps=value` → `CM_MLPERF_LOADGEN_OFFLINE_TARGET_QPS=value` -* `--output_dir=value` → `OUTPUT_BASE_DIR=value` -* `--power=value` → `CM_MLPERF_POWER=value` -* `--power_server=value` → `CM_MLPERF_POWER_SERVER_ADDRESS=value` -* `--regenerate_files=value` → `CM_REGENERATE_MEASURE_FILES=value` -* `--rerun=value` → `CM_RERUN=value` -* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` -* `--server_target_qps=value` → `CM_MLPERF_LOADGEN_SERVER_TARGET_QPS=value` -* `--singlestream_target_latency=value` → `CM_MLPERF_LOADGEN_SINGLESTREAM_TARGET_LATENCY=value` -* `--sut_servers=value` → `CM_NETWORK_LOADGEN_SUT_SERVERS=value` -* `--target_latency=value` → `CM_MLPERF_LOADGEN_TARGET_LATENCY=value` -* `--target_qps=value` → `CM_MLPERF_LOADGEN_TARGET_QPS=value` -* `--test_query_count=value` → `CM_TEST_QUERY_COUNT=value` -* `--threads=value` → `CM_NUM_THREADS=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "clean":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_MLPERF_LOADGEN_MODE: `accuracy` -* CM_MLPERF_LOADGEN_SCENARIO: `Offline` -* CM_OUTPUT_FOLDER_NAME: `test_results` -* CM_MLPERF_RUN_STYLE: `test` -* CM_TEST_QUERY_COUNT: `10` -* CM_MLPERF_QUANTIZATION: `False` -* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `reference` -* CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX: `` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-python/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,python - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,cuda,_cudnn - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_DEVICE': ['gpu'], 'CM_MLPERF_BACKEND': ['onnxruntime', 'tf', 'tflite', 'pytorch']}` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * get,nvidia,tensorrt - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['tensorrt']}` - - CM script: [get-tensorrt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tensorrt) - * get,generic-python-lib,_onnxruntime - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['onnxruntime', 'tvm-onnx'], 'CM_MLPERF_DEVICE': ['cpu', 'rocm']}` - * CM names: `--adr.['ml-engine-onnxruntime', 'onnxruntime']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_onnxruntime_gpu - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['onnxruntime', 'tvm-onnx'], 'CM_MLPERF_DEVICE': ['gpu']}` - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_MODEL': ['3d-unet-99', '3d-unet-99.9']}` - * CM names: `--adr.['ml-engine-onnxruntime-cuda']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_onnxruntime - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['onnxruntime'], 'CM_MLPERF_DEVICE': ['gpu'], 'CM_MODEL': ['3d-unet-99', '3d-unet-99.9', 'resnet50']}` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_onnxruntime_gpu - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['onnxruntime'], 'CM_MLPERF_DEVICE': ['gpu'], 'CM_MODEL': ['3d-unet-99', '3d-unet-99.9', 'resnet50']}` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torch - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['pytorch', 'tvm-pytorch'], 'CM_MLPERF_DEVICE': ['cpu', 'rocm']}` - * CM names: `--adr.['ml-engine-pytorch', 'pytorch']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torch_cuda - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['pytorch', 'tvm-pytorch', 'ray'], 'CM_MLPERF_DEVICE': ['gpu']}` - * CM names: `--adr.['ml-engine-pytorch', 'pytorch']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torchvision - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['pytorch', 'tvm-pytorch'], 'CM_MLPERF_DEVICE': ['cpu']}` - * CM names: `--adr.['ml-engine-torchvision']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torchvision_cuda - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['pytorch', 'tvm-pytorch', 'ray'], 'CM_MLPERF_DEVICE': ['gpu']}` - * CM names: `--adr.['ml-engine-torchvision']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_tensorrt - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['ray']}` - * CM names: `--adr.['ml-engine-tensorrt']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torch_tensorrt - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['ray']}` - * CM names: `--adr.['ml-engine-torch_tensorrt']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_ray - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['ray']}` - * CM names: `--adr.['ray']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_async_timeout - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['ray']}` - * CM names: `--adr.['async_timeout']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_transformers - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['bert-99', 'bert-99.9', 'gptj-99', 'gptj-99.9']}` - * CM names: `--adr.['ml-engine-transformers']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_tensorflow - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['tf', 'tflite']}` - * CM names: `--adr.['ml-engine-tensorflow', 'tensorflow']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.ncnn - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['ncnn']}` - * CM names: `--adr.['ml-engine-ncnn']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,ml-model,neural-magic,zoo - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_NEURALMAGIC_MODEL_ZOO_STUB': ['on']}` - * CM names: `--adr.['custom-ml-model']...` - - CM script: [get-ml-model-neuralmagic-zoo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-neuralmagic-zoo) - * get,ml-model,image-classification,resnet50 - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['resnet50']}` - * Skip this dependency only if all ENV vars are set:&#13;
    -`{'CM_MLPERF_CUSTOM_MODEL_PATH': ['on']}` - * CM names: `--adr.['ml-model', 'resnet50-model']...` - - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) - * get,ml-model,object-detection,retinanet - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['retinanet']}` - * CM names: `--adr.['ml-model', 'retinanet-model']...` - - CM script: [get-ml-model-retinanet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-retinanet) - * get,ml-model,large-language-model,gptj - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['gptj-99', 'gptj-99.9']}` - * CM names: `--adr.['ml-model', 'gptj-model', 'gpt-j-model']...` - - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) - * get,ml-model,object-detection,resnext50,fp32,_pytorch-weights - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['pytorch'], 'CM_MLPERF_IMPLEMENTATION': ['nvidia'], 'CM_MODEL': ['retinanet']}` - * CM names: `--adr.['ml-model', 'retinanet-model']...` - - *Warning: no scripts found* - * get,ml-model,language-processing,bert-large - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['bert-99', 'bert-99.9']}` - * Skip this dependency only if all ENV vars are set:&#13;
    -`{'CM_MLPERF_CUSTOM_MODEL_PATH': ['on']}` - * CM names: `--adr.['ml-model', 'bert-model']...` - - CM script: [get-ml-model-bert-large-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-large-squad) - * get,ml-model,stable-diffusion,text-to-image,sdxl - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['stable-diffusion-xl']}` - * Skip this dependency only if all ENV vars are set:&#13;
    -`{'CM_MLPERF_CUSTOM_MODEL_PATH': ['on']}` - * CM names: `--adr.['ml-model', 'sdxl-model']...` - - CM script: [get-ml-model-stable-diffusion](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-stable-diffusion) - * get,ml-model,llama2 - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['llama2-70b-99', 'llama2-70b-99.9']}` - * Skip this dependency only if all ENV vars are set:&#13;
    -`{'CM_MLPERF_CUSTOM_MODEL_PATH': ['on']}` - * CM names: `--adr.['ml-model', 'llama2-model']...` - - CM script: [get-ml-model-llama2](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-llama2) - * get,ml-model,medical-imaging,3d-unet - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['3d-unet-99', '3d-unet-99.9']}` - * CM names: `--adr.['ml-model', '3d-unet-model']...` - - CM script: [get-ml-model-3d-unet-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-3d-unet-kits19) - * get,ml-model,speech-recognition,rnnt - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['rnnt']}` - * CM names: `--adr.['ml-model', 'rnnt-model']...` - - CM script: [get-ml-model-rnnt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-rnnt) - * get,ml-model,recommendation,dlrm - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['dlrm-99', 'dlrm-99.9']}` - * CM names: `--adr.['ml-model', 'dlrm-model']...` - - CM script: [get-ml-model-dlrm-terabyte](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-dlrm-terabyte) - * get,dataset,image-classification,imagenet,preprocessed - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['resnet50']}` - * Skip this dependency only if all ENV vars are set:&#13;
    -`{'CM_MLPERF_VISION_DATASET_OPTION': [True]}` - * CM names: `--adr.['imagenet-preprocessed']...` - - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) - * get,dataset,image-classification,imagenet,preprocessed,_pytorch - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['resnet50'], 'CM_MLPERF_VISION_DATASET_OPTION': ['imagenet_pytorch']}` - * CM names: `--adr.['imagenet-preprocessed']...` - - CM script: [get-preprocessed-dataset-imagenet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-imagenet) - * get,dataset-aux,image-classification,imagenet-aux - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['resnet50']}` - - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) - * get,dataset,object-detection,open-images,openimages,preprocessed,_validation - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['retinanet']}` - * CM names: `--adr.['openimages-preprocessed']...` - - CM script: [get-preprocessed-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openimages) - * get,dataset,cnndm,_validation - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['gptj-99', 'gptj-99.9']}` - * CM names: `--adr.['cnndm-preprocessed']...` - - CM script: [get-dataset-cnndm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-cnndm) - * get,dataset,squad,original - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['bert-99', 'bert-99.9']}` - * CM names: `--adr.['cnndm-preprocessed']...` - - CM script: [get-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad) - * get,dataset-aux,squad-vocab - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['bert-99', 'bert-99.9']}` - - CM script: [get-dataset-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad-vocab) - * get,dataset,coco2014,_validation - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['stable-diffusion-xl']}` - * CM names: `--adr.['coco2014-preprocessed']...` - - CM script: [get-dataset-coco2014](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-coco2014) - * get,preprocessed,dataset,openorca,_validation - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['llama2-70b-99', 'llama2-70b-99.9']}` - * CM names: `--adr.['openorca-preprocessed']...` - - CM script: [get-preprocessed-dataset-openorca](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-openorca) - * get,dataset,kits19,preprocessed - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['3d-unet-99', '3d-unet-99.9']}` - * CM names: `--adr.['kits19-preprocessed']...` - - CM script: [get-preprocessed-dataset-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-kits19) - * get,dataset,librispeech,preprocessed - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['rnnt']}` - * CM names: `--adr.['librispeech-preprocessed']...` - - CM script: [get-preprocessed-dataset-librispeech](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-librispeech) - * get,dataset,criteo,preprocessed - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['dlrm-99', 'dlrm-99.9']}` - * CM names: `--adr.['criteo-preprocessed']...` - - CM script: [get-preprocessed-dataset-criteo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-criteo) - * generate,user-conf,mlperf,inference - * CM names: `--adr.['user-conf-generator']...` - - CM script: [generate-mlperf-inference-user-conf](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-user-conf) - * get,loadgen - * CM names: `--adr.['loadgen', 'mlperf-inference-loadgen']...` - - CM script: [get-mlperf-inference-loadgen](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-loadgen) - * get,mlcommons,inference,src - * CM names: `--adr.['inference-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * get,mlcommons,inference,src - * CM names: `--adr.['mlperf-implementation']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * get,generic-python-lib,_package.psutil - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-python/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-python/_cm.yaml)*** - * remote,run,cmds - * Enable this dependency only if all ENV vars are set:
    -`{'CM_ASSH_RUN_COMMANDS': ['on']}` - * CM names: `--adr.['remote-run-cmds']...` - - CM script: [remote-run-commands](https://github.com/mlcommons/cm4mlops/tree/master/script/remote-run-commands) - 1. ***Run native script if exists*** - 1. ***Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-python/_cm.yaml)*** - * benchmark-mlperf - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_MLPERF_SKIP_RUN': ['on']}` - * CM names: `--adr.['mlperf-runner']...` - - CM script: [benchmark-program-mlperf](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program-mlperf) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-python/customize.py)*** - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-mlcommons-python/_cm.yaml)*** - * save,mlperf,inference,state - * CM names: `--adr.['save-mlperf-inference-state']...` - - CM script: [save-mlperf-inference-implementation-state](https://github.com/mlcommons/cm4mlops/tree/master/script/save-mlperf-inference-implementation-state) - -___ -### Script output -`cmr "app vision language mlcommons mlperf inference reference ref [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_DATASET_*` -* `CM_HW_NAME` -* `CM_MAX_EXAMPLES` -* `CM_MLPERF_*` -* `CM_ML_MODEL_*` -#### New environment keys auto-detected from customize - -* `CM_MLPERF_BACKEND` -* `CM_MLPERF_CONF` -* `CM_MLPERF_DEVICE` -* `CM_MLPERF_LOADGEN_EXTRA_OPTIONS` -* `CM_MLPERF_LOADGEN_MODE` -* `CM_MLPERF_LOADGEN_QPS_OPT` -* `CM_MLPERF_LOADGEN_SCENARIO` -* `CM_MLPERF_OUTPUT_DIR` -* `CM_MLPERF_RUN_CMD` -* `CM_ML_MODEL_FILE_WITH_PATH` \ No newline at end of file diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference/index.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference/index.md deleted file mode 100644 index 819393fee..000000000 --- a/docs/Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference/index.md +++ /dev/null @@ -1,805 +0,0 @@ -Automatically generated README for this automation recipe: **app-mlperf-inference** - -Category: **Modular MLPerf inference benchmark pipeline** - -License: **Apache 2.0** - -Developers: [Arjun Suresh](https://www.linkedin.com/in/arjunsuresh), [Thomas 
Zhu](https://www.linkedin.com/in/hanwen-zhu-483614189), [Grigori Fursin](https://cKnowledge.org/gfursin) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-inference,d775cac873ee4231) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- - -This CM script provides a unified interface to prepare and run a modular version of the [MLPerf inference benchmark](https://arxiv.org/abs/1911.02549) -across diverse ML models, data sets, frameworks, libraries, run-time systems and platforms -using the [cross-platform automation meta-framework (MLCommons CM)](https://github.com/mlcommons/ck). - -It is assembled from reusable and interoperable [CM scripts for DevOps and MLOps](../list_of_scripts.md) -being developed by the [open MLCommons taskforce on automation and reproducibility](../mlperf-education-workgroup.md). - -It is a higher-level wrapper to several other CM scripts modularizing the MLPerf inference benchmark: -* [Reference Python implementation](../app-mlperf-inference-reference) -* [Universal C++ implementation](../app-mlperf-inference-cpp) -* [TFLite C++ implementation](../app-mlperf-inference-tflite-cpp) -* [NVidia optimized implementation](app-mlperf-inference-nvidia) - -See [this SCC'23 tutorial](https://github.com/mlcommons/ck/blob/master/docs/tutorials/sc22-scc-mlperf.md) -to use this script to run a reference (unoptimized) Python implementation of the MLPerf object detection benchmark -with RetinaNet model, Open Images dataset, ONNX runtime and CPU target. - -See this [CM script](../run-mlperf-inference-app) to automate and validate your MLPerf inference submission. 
- -Get in touch with the [open taskforce on automation and reproducibility at MLCommons](https://github.com/mlcommons/ck/blob/master/docs/mlperf-education-workgroup.md) -if you need help with your submission or if you would like to participate in further modularization of MLPerf -and collaborative design space exploration and optimization of ML Systems. - - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *app,vision,language,mlcommons,mlperf,inference,generic* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "app vision language mlcommons mlperf inference generic" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=app,vision,language,mlcommons,mlperf,inference,generic` - -`cm run script --tags=app,vision,language,mlcommons,mlperf,inference,generic[,variations] [--input_flags]` - -*or* - -`cmr "app vision language mlcommons mlperf inference generic"` - -`cmr "app vision language mlcommons mlperf inference generic [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started 
Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - - -#### Input Flags - -* --**scenario**=MLPerf inference scenario {Offline,Server,SingleStream,MultiStream} (*Offline*) -* --**mode**=MLPerf inference mode {performance,accuracy} (*accuracy*) -* --**test_query_count**=Specifies the number of samples to be processed during a test run -* --**target_qps**=Target QPS -* --**target_latency**=Target Latency -* --**max_batchsize**=Maximum batchsize to be used -* --**num_threads**=Number of CPU threads to launch the application with -* --**hw_name**=Valid value - any system description which has a config file (under same name) defined [here](https://github.com/mlcommons/cm4mlops/tree/main/script/get-configs-sut-mlperf-inference/configs) -* --**output_dir**=Location where the outputs are produced -* --**rerun**=Redo the run even if previous run files exist (*True*) -* --**regenerate_files**=Regenerates measurement files including accuracy.txt files even if a previous run exists. This option is redundant if `--rerun` is used -* --**adr.python.name**=Python virtual environment name (optional) (*mlperf*) -* --**adr.python.version_min**=Minimal Python version (*3.8*) -* --**adr.python.version**=Force Python version (must have all system deps) -* --**adr.compiler.tags**=Compiler for loadgen (*gcc*) -* --**adr.inference-src-loadgen.env.CM_GIT_URL**=Git URL for MLPerf inference sources to build LoadGen (to enable non-reference implementations) -* --**adr.inference-src.env.CM_GIT_URL**=Git URL for MLPerf inference sources to run benchmarks (to enable non-reference implementations) -* --**quiet**=Quiet run (select default values for all questions) (*False*) -* --**readme**=Generate README with the reproducibility report -* --**debug**=Debug MLPerf script - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "scenario":...} -``` -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'app,vision,language,mlcommons,mlperf,inference,generic' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="app,vision,language,mlcommons,mlperf,inference,generic"``` - -#### Run this script via Docker (beta) - -`cm docker script "app vision language mlcommons mlperf inference generic[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * Group "**implementation**" -
    - Click here to expand this section. - - * `_cpp` - - Aliases: `_mil,_mlcommons-cpp` - - Environment variables: - - *CM_MLPERF_CPP*: `yes` - - *CM_MLPERF_IMPLEMENTATION*: `mlcommons_cpp` - - *CM_IMAGENET_ACCURACY_DTYPE*: `float32` - - *CM_OPENIMAGES_ACCURACY_DTYPE*: `float32` - - Workflow: - 1. ***Read "prehook_deps" on other CM scripts*** - * app,mlperf,cpp,inference - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_SKIP_RUN': [True]}` - * CM names: `--adr.['cpp-mlperf-inference', 'mlperf-inference-implementation']...` - - CM script: [app-mlperf-inference-mlcommons-cpp](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-mlcommons-cpp) - * `_intel-original` - - Aliases: `_intel` - - Environment variables: - - *CM_MLPERF_IMPLEMENTATION*: `intel` - - Workflow: - 1. ***Read "prehook_deps" on other CM scripts*** - * reproduce,mlperf,inference,intel - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_SKIP_RUN': [True]}` - * CM names: `--adr.['intel', 'intel-harness', 'mlperf-inference-implementation']...` - - CM script: [app-mlperf-inference-intel](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-intel) - * `_kilt` - - Aliases: `_qualcomm` - - Environment variables: - - *CM_MLPERF_IMPLEMENTATION*: `qualcomm` - - Workflow: - 1. ***Read "prehook_deps" on other CM scripts*** - * reproduce,mlperf,inference,kilt - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_SKIP_RUN': [True]}` - * CM names: `--adr.['kilt', 'kilt-harness', 'mlperf-inference-implementation']...` - - CM script: [app-mlperf-inference-qualcomm](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-qualcomm) - * `_nvidia-original` - - Aliases: `_nvidia` - - Environment variables: - - *CM_MLPERF_IMPLEMENTATION*: `nvidia` - - *CM_SQUAD_ACCURACY_DTYPE*: `float16` - - *CM_IMAGENET_ACCURACY_DTYPE*: `int32` - - *CM_CNNDM_ACCURACY_DTYPE*: `int32` - - *CM_LIBRISPEECH_ACCURACY_DTYPE*: `int8` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,cuda-devices - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_CUDA_DEVICE_PROP_GLOBAL_MEMORY': ['yes', 'on']}` - - CM script: [get-cuda-devices](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda-devices) - 1. ***Read "prehook_deps" on other CM scripts*** - * reproduce,mlperf,nvidia,inference,_run_harness - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_SKIP_RUN': [True]}` - * CM names: `--adr.['nvidia-original-mlperf-inference', 'nvidia-harness', 'mlperf-inference-implementation']...` - - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) - * **`_reference`** (default) - - Aliases: `_mlcommons-python,_python` - - Environment variables: - - *CM_MLPERF_PYTHON*: `yes` - - *CM_MLPERF_IMPLEMENTATION*: `mlcommons_python` - - *CM_SQUAD_ACCURACY_DTYPE*: `float32` - - *CM_IMAGENET_ACCURACY_DTYPE*: `float32` - - *CM_OPENIMAGES_ACCURACY_DTYPE*: `float32` - - *CM_LIBRISPEECH_ACCURACY_DTYPE*: `float32` - - Workflow: - 1. ***Read "prehook_deps" on other CM scripts*** - * app,mlperf,reference,inference - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_SKIP_RUN': [True]}` - * CM names: `--adr.['python-reference-mlperf-inference', 'mlperf-inference-implementation']...` - - CM script: [app-mlperf-inference-mlcommons-python](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-mlcommons-python) - * `_tflite-cpp` - - Aliases: `_ctuning-cpp-tflite` - - Environment variables: - - *CM_MLPERF_TFLITE_CPP*: `yes` - - *CM_MLPERF_CPP*: `yes` - - *CM_MLPERF_IMPLEMENTATION*: `ctuning_cpp_tflite` - - *CM_IMAGENET_ACCURACY_DTYPE*: `float32` - - Workflow: - 1. ***Read "prehook_deps" on other CM scripts*** - * app,mlperf,tflite-cpp,inference - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_SKIP_RUN': [True]}` - * CM names: `--adr.['tflite-cpp-mlperf-inference', 'mlperf-inference-implementation']...` - - CM script: [app-mlperf-inference-ctuning-cpp-tflite](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-ctuning-cpp-tflite) - -
    - - - * Group "**backend**" -
    - Click here to expand this section. - - * `_deepsparse` - - Environment variables: - - *CM_MLPERF_BACKEND*: `deepsparse` - - Workflow: - * `_glow` - - Environment variables: - - *CM_MLPERF_BACKEND*: `glow` - - Workflow: - * `_ncnn` - - Environment variables: - - *CM_MLPERF_BACKEND*: `ncnn` - - Workflow: - * `_onnxruntime` - - Environment variables: - - *CM_MLPERF_BACKEND*: `onnxruntime` - - Workflow: - * `_pytorch` - - Environment variables: - - *CM_MLPERF_BACKEND*: `pytorch` - - Workflow: - * `_ray` - - Environment variables: - - *CM_MLPERF_BACKEND*: `ray` - - Workflow: - * `_tensorrt` - - Environment variables: - - *CM_MLPERF_BACKEND*: `tensorrt` - - Workflow: - * `_tf` - - Environment variables: - - *CM_MLPERF_BACKEND*: `tf` - - Workflow: - * `_tflite` - - Environment variables: - - *CM_MLPERF_BACKEND*: `tflite` - - Workflow: - * `_tvm-onnx` - - Environment variables: - - *CM_MLPERF_BACKEND*: `tvm-onnx` - - Workflow: - * `_tvm-pytorch` - - Environment variables: - - *CM_MLPERF_BACKEND*: `tvm-pytorch` - - Workflow: - * `_tvm-tflite` - - Environment variables: - - *CM_MLPERF_BACKEND*: `tvm-tflite` - - Workflow: - -
    - - - * Group "**device**" -
    - Click here to expand this section. - - * **`_cpu`** (default) - - Environment variables: - - *CM_MLPERF_DEVICE*: `cpu` - - Workflow: - * `_cuda` - - Environment variables: - - *CM_MLPERF_DEVICE*: `gpu` - - Workflow: - * `_qaic` - - Environment variables: - - *CM_MLPERF_DEVICE*: `qaic` - - Workflow: - * `_rocm` - - Environment variables: - - *CM_MLPERF_DEVICE*: `rocm` - - Workflow: - * `_tpu` - - Environment variables: - - *CM_MLPERF_DEVICE*: `tpu` - - Workflow: - -
    - - - * Group "**model**" -
    - Click here to expand this section. - - * `_3d-unet-99` - - Environment variables: - - *CM_MODEL*: `3d-unet-99` - - Workflow: - * `_3d-unet-99.9` - - Environment variables: - - *CM_MODEL*: `3d-unet-99.9` - - Workflow: - * `_bert-99` - - Environment variables: - - *CM_MODEL*: `bert-99` - - Workflow: - * `_bert-99.9` - - Environment variables: - - *CM_MODEL*: `bert-99.9` - - Workflow: - * `_dlrm-v2-99` - - Environment variables: - - *CM_MODEL*: `dlrm-v2-99` - - Workflow: - * `_dlrm-v2-99.9` - - Environment variables: - - *CM_MODEL*: `dlrm-v2-99.9` - - Workflow: - * `_efficientnet` - - Environment variables: - - *CM_MODEL*: `efficientnet` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,dataset-aux,imagenet-aux - - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) - 1. ***Read "posthook_deps" on other CM scripts*** - * run,accuracy,mlperf,_imagenet - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` - * CM names: `--adr.['mlperf-accuracy-script', 'imagenet-accuracy-script']...` - - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) - * `_gptj-99` - - Environment variables: - - *CM_MODEL*: `gptj-99` - - Workflow: - * `_gptj-99.9` - - Environment variables: - - *CM_MODEL*: `gptj-99.9` - - Workflow: - * `_llama2-70b-99` - - Environment variables: - - *CM_MODEL*: `llama2-70b-99` - - Workflow: - * `_llama2-70b-99.9` - - Environment variables: - - *CM_MODEL*: `llama2-70b-99.9` - - Workflow: - * `_mobilenet` - - Environment variables: - - *CM_MODEL*: `mobilenet` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,dataset-aux,imagenet-aux - - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) - 1. ***Read "posthook_deps" on other CM scripts*** - * run,accuracy,mlperf,_imagenet - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` - * CM names: `--adr.['mlperf-accuracy-script', 'imagenet-accuracy-script']...` - - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) - * **`_resnet50`** (default) - - Environment variables: - - *CM_MODEL*: `resnet50` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,dataset-aux,imagenet-aux - - CM script: [get-dataset-imagenet-aux](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-aux) - 1. ***Read "posthook_deps" on other CM scripts*** - * run,accuracy,mlperf,_imagenet - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` - * CM names: `--adr.['mlperf-accuracy-script', 'imagenet-accuracy-script']...` - - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) - * `_retinanet` - - Environment variables: - - *CM_MODEL*: `retinanet` - - Workflow: - 1. ***Read "posthook_deps" on other CM scripts*** - * run,accuracy,mlperf,_openimages - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` - * CM names: `--adr.['mlperf-accuracy-script', 'openimages-accuracy-script']...` - - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) - * `_rnnt` - - Environment variables: - - *CM_MODEL*: `rnnt` - - Workflow: - 1. ***Read "posthook_deps" on other CM scripts*** - * run,accuracy,mlperf,_librispeech - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_MLPERF_IMPLEMENTATION': ['nvidia']}` - * CM names: `--adr.['mlperf-accuracy-script', 'librispeech-accuracy-script']...` - - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) - * `_sdxl` - - Environment variables: - - *CM_MODEL*: `stable-diffusion-xl` - - Workflow: - 1. ***Read "posthook_deps" on other CM scripts*** - * run,accuracy,mlperf,_coco2014 - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_MLPERF_IMPLEMENTATION': ['nvidia']}` - * CM names: `--adr.['mlperf-accuracy-script', 'coco2014-accuracy-script']...` - - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) - -
    - - - * Group "**precision**" -
    - Click here to expand this section. - - * `_bfloat16` - - Environment variables: - - *CM_MLPERF_QUANTIZATION*: `False` - - *CM_MLPERF_MODEL_PRECISION*: `float32` - - Workflow: - * `_float16` - - Environment variables: - - *CM_MLPERF_QUANTIZATION*: `False` - - *CM_MLPERF_MODEL_PRECISION*: `float32` - - Workflow: - * **`_float32`** (default) - - Aliases: `_fp32` - - Environment variables: - - *CM_MLPERF_QUANTIZATION*: `False` - - *CM_MLPERF_MODEL_PRECISION*: `float32` - - Workflow: - * `_int4` - - Environment variables: - - *CM_MLPERF_QUANTIZATION*: `True` - - *CM_MLPERF_MODEL_PRECISION*: `int4` - - Workflow: - * `_int8` - - Aliases: `_quantized` - - Environment variables: - - *CM_MLPERF_QUANTIZATION*: `True` - - *CM_MLPERF_MODEL_PRECISION*: `int8` - - Workflow: - * `_uint8` - - Environment variables: - - *CM_MLPERF_QUANTIZATION*: `True` - - *CM_MLPERF_MODEL_PRECISION*: `uint8` - - Workflow: - -
    - - - * Group "**execution-mode**" -
    - Click here to expand this section. - - * `_fast` - - Environment variables: - - *CM_FAST_FACTOR*: `5` - - *CM_OUTPUT_FOLDER_NAME*: `fast_results` - - *CM_MLPERF_RUN_STYLE*: `fast` - - Workflow: - * **`_test`** (default) - - Environment variables: - - *CM_OUTPUT_FOLDER_NAME*: `test_results` - - *CM_MLPERF_RUN_STYLE*: `test` - - Workflow: - * `_valid` - - Environment variables: - - *CM_OUTPUT_FOLDER_NAME*: `valid_results` - - *CM_MLPERF_RUN_STYLE*: `valid` - - Workflow: - -
    - - - * Group "**reproducibility**" -
    - Click here to expand this section. - - * `_r2.1_default` - - Environment variables: - - *CM_SKIP_SYS_UTILS*: `yes` - - *CM_TEST_QUERY_COUNT*: `100` - - Workflow: - * `_r3.0_default` - - Environment variables: - - *CM_SKIP_SYS_UTILS*: `yes` - - Workflow: - * `_r3.1_default` - - Workflow: - * `_r4.0_default` - - Workflow: - -
    - - - * *Internal group (variations should not be selected manually)* -
    - Click here to expand this section. - - * `_3d-unet_` - - Environment variables: - - *CM_MLPERF_MODEL_EQUAL_ISSUE_MODE*: `yes` - - Workflow: - 1. ***Read "posthook_deps" on other CM scripts*** - * run,accuracy,mlperf,_kits19,_int8 - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_MLPERF_IMPLEMENTATION': ['nvidia']}` - * CM names: `--adr.['mlperf-accuracy-script', '3d-unet-accuracy-script']...` - - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) - * `_bert_` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,dataset,squad,language-processing - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_DATASET_SQUAD_VAL_PATH': 'on'}` - - CM script: [get-dataset-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad) - * get,dataset-aux,squad-vocab - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_ML_MODEL_BERT_VOCAB_FILE_WITH_PATH': 'on'}` - - CM script: [get-dataset-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad-vocab) - 1. ***Read "posthook_deps" on other CM scripts*** - * run,accuracy,mlperf,_squad - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` - * CM names: `--adr.['squad-accuracy-script', 'mlperf-accuracy-script']...` - - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) - * `_dlrm_` - - Workflow: - 1. ***Read "posthook_deps" on other CM scripts*** - * run,accuracy,mlperf,_terabyte,_float32 - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` - * CM names: `--adr.['terabyte-accuracy-script', 'mlperf-accuracy-script']...` - - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) - * `_gptj_` - - Aliases: `_gptj` - - Environment variables: - - *CM_MLPERF_MODEL_EQUAL_ISSUE_MODE*: `yes` - - Workflow: - 1. ***Read "posthook_deps" on other CM scripts*** - * run,accuracy,mlperf,_cnndm - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_MLPERF_IMPLEMENTATION': ['intel']}` - * CM names: `--adr.['cnndm-accuracy-script', 'mlperf-accuracy-script']...` - - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) - * `_intel-original,gptj_` - - Workflow: - * `_llama2-70b_` - - Environment variables: - - *CM_MLPERF_MODEL_EQUAL_ISSUE_MODE*: `yes` - - Workflow: - 1. ***Read "posthook_deps" on other CM scripts*** - * run,accuracy,mlperf,_open-orca,_int32 - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_LOADGEN_MODE': ['accuracy', 'all'], 'CM_MLPERF_ACCURACY_RESULTS_DIR': ['on']}` - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_MLPERF_IMPLEMENTATION': ['nvidia']}` - * CM names: `--adr.['mlperf-accuracy-script', 'open-orca-accuracy-script']...` - - CM script: [process-mlperf-accuracy](https://github.com/mlcommons/cm4mlops/tree/master/script/process-mlperf-accuracy) - * `_reference,bert_` - - Workflow: - * `_reference,dlrm-v2_` - - Workflow: - * `_reference,gptj_` - - Workflow: - * `_reference,llama2-70b_` - - Workflow: - * `_reference,sdxl_` - - Workflow: - -
    - - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_intel-original,bert-99` - - Workflow: - * `_intel-original,bert-99.9` - - Workflow: - * `_intel-original,gptj-99` - - Workflow: - * `_intel-original,gptj-99.9` - - Workflow: - * `_intel-original,gptj_,build-harness` - - Workflow: - * `_intel-original,resnet50` - - Workflow: - * `_intel-original,retinanet` - - Workflow: - * `_kilt,qaic,bert-99` - - Workflow: - * `_kilt,qaic,bert-99.9` - - Workflow: - * `_kilt,qaic,resnet50` - - Workflow: - * `_kilt,qaic,retinanet` - - Workflow: - * `_power` - - Environment variables: - - *CM_MLPERF_POWER*: `yes` - - *CM_SYSTEM_POWER*: `yes` - - Workflow: - * `_reference,resnet50` - - Workflow: - * `_reference,retinanet` - - Workflow: - * `_rnnt,reference` - - Environment variables: - - *CM_MLPERF_PRINT_SUMMARY*: `no` - - Workflow: - * `_valid,retinanet` - - Workflow: - -
    - - - * Group "**batch_size**" -
    - Click here to expand this section. - - * `_batch_size.#` - - Environment variables: - - *CM_MLPERF_LOADGEN_MAX_BATCHSIZE*: `#` - - Workflow: - -
    - - - * Group "**loadgen-scenario**" -
    - Click here to expand this section. - - * `_multistream` - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `MultiStream` - - Workflow: - * **`_offline`** (default) - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `Offline` - - Workflow: - * `_server` - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `Server` - - Workflow: - * `_singlestream` - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `SingleStream` - - Workflow: - -
    - - -#### Unsupported or invalid variation combinations - - - -* `_resnet50,_pytorch` -* `_retinanet,_tf` -* `_nvidia-original,_tf` -* `_nvidia-original,_onnxruntime` -* `_nvidia-original,_pytorch` -* `_nvidia,_tf` -* `_nvidia,_onnxruntime` -* `_nvidia,_pytorch` -* `_gptj,_tf` - -#### Default variations - -`_cpu,_float32,_offline,_reference,_resnet50,_test` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--clean=value` → `CM_MLPERF_CLEAN_SUBMISSION_DIR=value` -* `--count=value` → `CM_MLPERF_LOADGEN_QUERY_COUNT=value` -* `--debug=value` → `CM_DEBUG_SCRIPT_BENCHMARK_PROGRAM=value` -* `--docker=value` → `CM_RUN_DOCKER_CONTAINER=value` -* `--gpu_name=value` → `CM_NVIDIA_GPU_NAME=value` -* `--hw_name=value` → `CM_HW_NAME=value` -* `--imagenet_path=value` → `IMAGENET_PATH=value` -* `--max_amps=value` → `CM_MLPERF_POWER_MAX_AMPS=value` -* `--max_batchsize=value` → `CM_MLPERF_LOADGEN_MAX_BATCHSIZE=value` -* `--max_volts=value` → `CM_MLPERF_POWER_MAX_VOLTS=value` -* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` -* `--multistream_target_latency=value` → `CM_MLPERF_LOADGEN_MULTISTREAM_TARGET_LATENCY=value` -* `--ntp_server=value` → `CM_MLPERF_POWER_NTP_SERVER=value` -* `--num_threads=value` → `CM_NUM_THREADS=value` -* `--offline_target_qps=value` → `CM_MLPERF_LOADGEN_OFFLINE_TARGET_QPS=value` -* `--output_dir=value` → `OUTPUT_BASE_DIR=value` -* `--power=value` → `CM_MLPERF_POWER=value` -* `--power_server=value` → `CM_MLPERF_POWER_SERVER_ADDRESS=value` -* `--readme=value` → `CM_MLPERF_README=value` -* `--regenerate_files=value` → `CM_REGENERATE_MEASURE_FILES=value` -* `--rerun=value` → `CM_RERUN=value` -* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` -* `--server_target_qps=value` → `CM_MLPERF_LOADGEN_SERVER_TARGET_QPS=value` -* `--singlestream_target_latency=value` → `CM_MLPERF_LOADGEN_SINGLESTREAM_TARGET_LATENCY=value` -* `--target_latency=value` → `CM_MLPERF_LOADGEN_TARGET_LATENCY=value` -* `--target_qps=value` → `CM_MLPERF_LOADGEN_TARGET_QPS=value` -* `--test_query_count=value` → `CM_TEST_QUERY_COUNT=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "clean":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_MLPERF_LOADGEN_MODE: `accuracy` -* CM_MLPERF_LOADGEN_SCENARIO: `Offline` -* CM_OUTPUT_FOLDER_NAME: `test_results` -* CM_MLPERF_RUN_STYLE: `test` -* CM_TEST_QUERY_COUNT: `10` -* CM_MLPERF_QUANTIZATION: `False` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,python - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,mlcommons,inference,src - * CM names: `--adr.['inference-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * get,mlperf,inference,utils - - CM script: [get-mlperf-inference-utils](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-utils) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference/_cm.yaml) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference/run.sh) - 1. ***Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference/_cm.yaml)*** - * get,mlperf,sut,description - - CM script: [get-mlperf-inference-sut-description](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-sut-description) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference/_cm.yaml) - -___ -### Script output -`cmr "app vision language mlcommons mlperf inference generic [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_MLPERF_*` -#### New environment keys auto-detected from customize - -* `CM_MLPERF_LOADGEN_COMPLIANCE_TEST` \ No newline at end of file diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/benchmark-program-mlperf/index.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/benchmark-program-mlperf/index.md deleted file mode 100644 index 37dfa75d4..000000000 --- a/docs/Modular-MLPerf-inference-benchmark-pipeline/benchmark-program-mlperf/index.md +++ /dev/null @@ -1,152 +0,0 @@ -Automatically generated README for this automation recipe: **benchmark-program-mlperf** - -Category: **Modular MLPerf inference benchmark pipeline** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=benchmark-program-mlperf,cfff0132a8aa4018) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program-mlperf)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *mlperf,benchmark-mlperf* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "mlperf benchmark-mlperf" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=mlperf,benchmark-mlperf` - -`cm run script --tags=mlperf,benchmark-mlperf[,variations] ` - -*or* - -`cmr "mlperf benchmark-mlperf"` - -`cmr "mlperf benchmark-mlperf [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'mlperf,benchmark-mlperf' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="mlperf,benchmark-mlperf"``` - -#### Run this script via Docker (beta) - -`cm docker script "mlperf benchmark-mlperf[variations]" ` - -___ -### Customization - - -#### Variations - - * Group "**power-mode**" -
    - Click here to expand this section. - - * **`_no-power`** (default) - - Workflow: - 1. ***Read "post_deps" on other CM scripts*** - * benchmark-program,program - * CM names: `--adr.['benchmark-program']...` - - CM script: [benchmark-program](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program) - * `_power` - - Environment variables: - - *CM_MLPERF_POWER*: `yes` - - Workflow: - 1. ***Read "prehook_deps" on other CM scripts*** - * benchmark-program,program - * CM names: `--adr.['benchmark-program']...` - - CM script: [benchmark-program](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program) - 1. ***Read "post_deps" on other CM scripts*** - * run,mlperf,power,client - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_LOADGEN_MODE': ['performance']}` - * CM names: `--adr.['mlperf-power-client']...` - - CM script: [run-mlperf-power-client](https://github.com/mlcommons/cm4mlops/tree/master/script/run-mlperf-power-client) - -
    - - -#### Default variations - -`_no-power` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program-mlperf/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program-mlperf/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program-mlperf/_cm.json) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program-mlperf/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program-mlperf/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/benchmark-program-mlperf/_cm.json) - -___ -### Script output -`cmr "mlperf benchmark-mlperf [,variations]" -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/index.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/index.md new file mode 100644 index 000000000..171a7af96 --- /dev/null +++ b/docs/Modular-MLPerf-inference-benchmark-pipeline/index.md @@ -0,0 +1,9 @@ +The Modular MLPerf inference benchmark pipeline category contains the following scripts: + +- [app-loadgen-generic-python](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/app-loadgen-generic-python/README.md) +- [app-mlperf-inference](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/app-mlperf-inference/README.md) +- [app-mlperf-inference-ctuning-cpp-tflite](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/app-mlperf-inference-ctuning-cpp-tflite/README.md) +- 
[app-mlperf-inference-mlcommons-cpp](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/app-mlperf-inference-mlcommons-cpp/README.md) +- [app-mlperf-inference-mlcommons-python](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/app-mlperf-inference-mlcommons-python/README.md) +- [benchmark-program-mlperf](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/benchmark-program-mlperf/README.md) +- [run-mlperf-inference-app](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/run-mlperf-inference-app/README.md) diff --git a/docs/Modular-MLPerf-inference-benchmark-pipeline/run-mlperf-inference-app/index.md b/docs/Modular-MLPerf-inference-benchmark-pipeline/run-mlperf-inference-app/index.md deleted file mode 100644 index 641d37d54..000000000 --- a/docs/Modular-MLPerf-inference-benchmark-pipeline/run-mlperf-inference-app/index.md +++ /dev/null @@ -1,405 +0,0 @@ -Automatically generated README for this automation recipe: **run-mlperf-inference-app** - -Category: **Modular MLPerf inference benchmark pipeline** - -License: **Apache 2.0** - -Developers: [Arjun Suresh](https://www.linkedin.com/in/arjunsuresh), [Grigori Fursin](https://cKnowledge.org/gfursin) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-mlperf-inference-app,4a5d5b13fd7e4ac8) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-app)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *run-mlperf,inference* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "run-mlperf,inference" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=run-mlperf,inference` - -`cm run script --tags=run-mlperf,inference[,variations] [--input_flags]` - -*or* - -`cmr "run-mlperf,inference"` - -`cmr "run-mlperf,inference [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - - -#### Input Flags - -* --**division**=MLPerf division {open,closed} (*open*) -* --**category**=MLPerf category {edge,datacenter,network} (*edge*) -* --**device**=MLPerf device {cpu,cuda,rocm,qaic} (*cpu*) -* --**model**=MLPerf model {resnet50,retinanet,bert-99,bert-99.9,3d-unet-99,3d-unet-99.9,rnnt,dlrm-v2-99,dlrm-v2-99.9,gptj-99,gptj-99.9,sdxl,llama2-70b-99,llama2-70b-99.9,mobilenet,efficientnet} (*resnet50*) -* --**precision**=MLPerf model precision {float32,float16,bfloat16,int8,uint8} -* --**implementation**=MLPerf implementation {mlcommons-python,mlcommons-cpp,nvidia,intel,qualcomm,ctuning-cpp-tflite} (*mlcommons-python*) -* --**backend**=MLPerf framework (backend) {onnxruntime,tf,pytorch,deepsparse,tensorrt,glow,tvm-onnx} (*onnxruntime*) -* --**scenario**=MLPerf scenario {Offline,Server,SingleStream,MultiStream} (*Offline*) -* --**mode**=MLPerf benchmark mode {,accuracy,performance} -* 
--**execution_mode**=MLPerf execution mode {test,fast,valid} (*test*) -* --**sut**=SUT configuration (if known) -* --**submitter**=Submitter name (without space) (*CTuning*) -* --**results_dir**=Folder path to store results (defaults to the current working directory) -* --**submission_dir**=Folder path to store MLPerf submission tree -* --**adr.compiler.tags**=Compiler for loadgen and any C/C++ part of implementation -* --**adr.inference-src-loadgen.env.CM_GIT_URL**=Git URL for MLPerf inference sources to build LoadGen (to enable non-reference implementations) -* --**adr.inference-src.env.CM_GIT_URL**=Git URL for MLPerf inference sources to run benchmarks (to enable non-reference implementations) -* --**adr.mlperf-inference-implementation.max_batchsize**=Maximum batchsize to be used -* --**adr.mlperf-inference-implementation.num_threads**=Number of threads (reference & C++ implementation only) -* --**adr.python.name**=Python virtual environment name (optional) -* --**adr.python.version**=Force Python version (must have all system deps) -* --**adr.python.version_min**=Minimal Python version (*3.8*) -* --**power**=Measure power {yes,no} (*no*) -* --**adr.mlperf-power-client.power_server**=MLPerf Power server IP address (*192.168.0.15*) -* --**adr.mlperf-power-client.port**=MLPerf Power server port (*4950*) -* --**clean**=Clean run (*False*) -* --**compliance**=Whether to run compliance tests (applicable only for closed division) {yes,no} (*no*) -* --**dashboard_wb_project**=W&B dashboard project (*cm-mlperf-dse-testing*) -* --**dashboard_wb_user**=W&B dashboard user (*cmind*) -* --**hw_name**=MLPerf hardware name (for example "gcp.c3_standard_8", "nvidia_orin", "lenovo_p14s_gen_4_windows_11", "macbook_pro_m1_2", "thundercomm_rb6" ...) 
-* --**multistream_target_latency**=Set MultiStream target latency -* --**offline_target_qps**=Set LoadGen Offline target QPS -* --**quiet**=Quiet run (select default values for all questions) (*True*) -* --**server_target_qps**=Set Server target QPS -* --**singlestream_target_latency**=Set SingleStream target latency -* --**target_latency**=Set Target latency -* --**target_qps**=Set LoadGen target QPS -* --**j**=Print results dictionary to console at the end of the run (*False*) -* --**repro**=Record input/output/state/info files to make it easier to reproduce results (*False*) -* --**time**=Print script execution time at the end of the run (*True*) -* --**debug**=Debug this script (*False*) - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "division":...} -``` -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'run-mlperf,inference' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="run-mlperf,inference"``` - -#### Run this script via Docker (beta) - -`cm docker script "run-mlperf,inference[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_all-scenarios` - - Environment variables: - - *CM_MLPERF_LOADGEN_ALL_SCENARIOS*: `yes` - - Workflow: - * `_compliance` - - Environment variables: - - *CM_MLPERF_LOADGEN_COMPLIANCE*: `yes` - - Workflow: - * `_dashboard` - - Environment variables: - - *CM_MLPERF_DASHBOARD*: `on` - - Workflow: - -
    - - - * Group "**benchmark-version**" -
    - Click here to expand this section. - - * `_r2.1` - - Environment variables: - - *CM_MLPERF_INFERENCE_VERSION*: `2.1` - - *CM_RUN_MLPERF_INFERENCE_APP_DEFAULTS*: `r2.1_default` - - Workflow: - * `_r3.0` - - Environment variables: - - *CM_MLPERF_INFERENCE_VERSION*: `3.0` - - *CM_RUN_MLPERF_INFERENCE_APP_DEFAULTS*: `r3.0_default` - - Workflow: - * `_r3.1` - - Environment variables: - - *CM_MLPERF_INFERENCE_VERSION*: `3.1` - - *CM_RUN_MLPERF_INFERENCE_APP_DEFAULTS*: `r3.1_default` - - Workflow: - * `_r4.0` - - Environment variables: - - *CM_MLPERF_INFERENCE_VERSION*: `4.0` - - *CM_RUN_MLPERF_INFERENCE_APP_DEFAULTS*: `r4.0_default` - - Workflow: - -
    - - - * Group "**mode**" -
    - Click here to expand this section. - - * `_all-modes` - - Environment variables: - - *CM_MLPERF_LOADGEN_ALL_MODES*: `yes` - - Workflow: - -
    - - - * Group "**submission-generation**" -
    - Click here to expand this section. - - * `_accuracy-only` - - Environment variables: - - *CM_MLPERF_LOADGEN_MODE*: `accuracy` - - *CM_MLPERF_SUBMISSION_RUN*: `yes` - - *CM_RUN_MLPERF_ACCURACY*: `on` - - *CM_RUN_SUBMISSION_CHECKER*: `no` - - Workflow: - * **`_find-performance`** (default) - - Environment variables: - - *CM_MLPERF_FIND_PERFORMANCE_MODE*: `yes` - - *CM_MLPERF_LOADGEN_ALL_MODES*: `no` - - *CM_MLPERF_LOADGEN_MODE*: `performance` - - *CM_MLPERF_RESULT_PUSH_TO_GITHUB*: `False` - - Workflow: - * `_performance-only` - - Environment variables: - - *CM_MLPERF_LOADGEN_MODE*: `performance` - - *CM_MLPERF_SUBMISSION_RUN*: `yes` - - *CM_RUN_SUBMISSION_CHECKER*: `no` - - Workflow: - * `_populate-readme` - - Environment variables: - - *CM_MLPERF_README*: `yes` - - *CM_MLPERF_SUBMISSION_RUN*: `yes` - - *CM_RUN_SUBMISSION_CHECKER*: `no` - - Workflow: - * `_submission` - - Environment variables: - - *CM_MLPERF_LOADGEN_COMPLIANCE*: `yes` - - *CM_MLPERF_SUBMISSION_RUN*: `yes` - - *CM_RUN_MLPERF_ACCURACY*: `on` - - *CM_RUN_SUBMISSION_CHECKER*: `yes` - - *CM_TAR_SUBMISSION_DIR*: `yes` - - Workflow: - 1. ***Read "post_deps" on other CM scripts*** - * generate,mlperf,inference,submission - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_SKIP_SUBMISSION_GENERATION': ['no', 'false', 'False', '0']}` - * CM names: `--adr.['submission-generator']...` - - CM script: [generate-mlperf-inference-submission](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-submission) - -
    - - - * Group "**submission-generation-style**" -
    - Click here to expand this section. - - * `_full` - - Environment variables: - - *CM_MLPERF_SUBMISSION_GENERATION_STYLE*: `full` - - *CM_MLPERF_SKIP_SUBMISSION_GENERATION*: `yes` - - Workflow: - * **`_short`** (default) - - Environment variables: - - *CM_MLPERF_SUBMISSION_GENERATION_STYLE*: `short` - - Workflow: - -
    - - -#### Default variations - -`_find-performance,_short` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--backend=value` → `CM_MLPERF_BACKEND=value` -* `--batch_size=value` → `CM_MLPERF_LOADGEN_MAX_BATCHSIZE=value` -* `--category=value` → `CM_MLPERF_SUBMISSION_SYSTEM_TYPE=value` -* `--clean=value` → `CM_MLPERF_CLEAN_ALL=value` -* `--compliance=value` → `CM_MLPERF_LOADGEN_COMPLIANCE=value` -* `--dashboard_wb_project=value` → `CM_MLPERF_DASHBOARD_WANDB_PROJECT=value` -* `--dashboard_wb_user=value` → `CM_MLPERF_DASHBOARD_WANDB_USER=value` -* `--debug=value` → `CM_DEBUG_SCRIPT_BENCHMARK_PROGRAM=value` -* `--device=value` → `CM_MLPERF_DEVICE=value` -* `--division=value` → `CM_MLPERF_SUBMISSION_DIVISION=value` -* `--docker=value` → `CM_MLPERF_USE_DOCKER=value` -* `--dump_version_info=value` → `CM_DUMP_VERSION_INFO=value` -* `--execution_mode=value` → `CM_MLPERF_RUN_STYLE=value` -* `--find_performance=value` → `CM_MLPERF_FIND_PERFORMANCE_MODE=value` -* `--gpu_name=value` → `CM_NVIDIA_GPU_NAME=value` -* `--hw_name=value` → `CM_HW_NAME=value` -* `--hw_notes_extra=value` → `CM_MLPERF_SUT_SW_NOTES_EXTRA=value` -* `--imagenet_path=value` → `IMAGENET_PATH=value` -* `--implementation=value` → `CM_MLPERF_IMPLEMENTATION=value` -* `--lang=value` → `CM_MLPERF_IMPLEMENTATION=value` -* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` -* `--model=value` → `CM_MLPERF_MODEL=value` -* `--multistream_target_latency=value` → `CM_MLPERF_LOADGEN_MULTISTREAM_TARGET_LATENCY=value` -* `--network=value` → `CM_NETWORK_LOADGEN=value` -* `--offline_target_qps=value` → `CM_MLPERF_LOADGEN_OFFLINE_TARGET_QPS=value` -* `--output_dir=value` → `OUTPUT_BASE_DIR=value` -* `--output_summary=value` → `MLPERF_INFERENCE_SUBMISSION_SUMMARY=value` -* `--output_tar=value` → `MLPERF_INFERENCE_SUBMISSION_TAR_FILE=value` -* `--performance_sample_count=value` → `CM_MLPERF_LOADGEN_PERFORMANCE_SAMPLE_COUNT=value` -* `--power=value` → `CM_SYSTEM_POWER=value` -* `--precision=value` → `CM_MLPERF_MODEL_PRECISION=value` -* `--preprocess_submission=value` → 
`CM_RUN_MLPERF_SUBMISSION_PREPROCESSOR=value` -* `--push_to_github=value` → `CM_MLPERF_RESULT_PUSH_TO_GITHUB=value` -* `--readme=value` → `CM_MLPERF_README=value` -* `--regenerate_accuracy_file=value` → `CM_MLPERF_REGENERATE_ACCURACY_FILE=value` -* `--regenerate_files=value` → `CM_REGENERATE_MEASURE_FILES=value` -* `--rerun=value` → `CM_RERUN=value` -* `--results_dir=value` → `OUTPUT_BASE_DIR=value` -* `--results_git_url=value` → `CM_MLPERF_RESULTS_GIT_REPO_URL=value` -* `--run_checker=value` → `CM_RUN_SUBMISSION_CHECKER=value` -* `--run_style=value` → `CM_MLPERF_RUN_STYLE=value` -* `--save_console_log=value` → `CM_SAVE_CONSOLE_LOG=value` -* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` -* `--server_target_qps=value` → `CM_MLPERF_LOADGEN_SERVER_TARGET_QPS=value` -* `--singlestream_target_latency=value` → `CM_MLPERF_LOADGEN_SINGLESTREAM_TARGET_LATENCY=value` -* `--skip_submission_generation=value` → `CM_MLPERF_SKIP_SUBMISSION_GENERATION=value` -* `--skip_truncation=value` → `CM_SKIP_TRUNCATE_ACCURACY=value` -* `--submission_dir=value` → `CM_MLPERF_INFERENCE_SUBMISSION_DIR=value` -* `--submitter=value` → `CM_MLPERF_SUBMITTER=value` -* `--sut=value` → `CM_MLPERF_INFERENCE_SUT_VARIATION=value` -* `--sut_servers=value` → `CM_NETWORK_LOADGEN_SUT_SERVERS=value` -* `--sw_notes_extra=value` → `CM_MLPERF_SUT_SW_NOTES_EXTRA=value` -* `--system_type=value` → `CM_MLPERF_SUBMISSION_SYSTEM_TYPE=value` -* `--target_latency=value` → `CM_MLPERF_LOADGEN_TARGET_LATENCY=value` -* `--target_qps=value` → `CM_MLPERF_LOADGEN_TARGET_QPS=value` -* `--test_query_count=value` → `CM_TEST_QUERY_COUNT=value` -* `--threads=value` → `CM_NUM_THREADS=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "backend":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_MLPERF_IMPLEMENTATION: `reference` -* CM_MLPERF_MODEL: `resnet50` -* CM_MLPERF_RUN_STYLE: `test` - -
    - -#### Versions -* `master` -* `r2.1` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-app/_cm.yaml)*** - * detect,os - * Skip this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_USE_DOCKER': [True]}` - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - * Skip this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_USE_DOCKER': [True]}` - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,python3 - * Skip this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_USE_DOCKER': [True]}` - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,mlcommons,inference,src - * CM names: `--adr.['inference-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * get,sut,description - - CM script: [get-mlperf-inference-sut-description](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-sut-description) - * get,mlperf,inference,results,dir - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_USE_DOCKER': [False]}` - * Skip this dependency only if all ENV vars are set:
    -`{'OUTPUT_BASE_DIR': [True]}` - * CM names: `--adr.['get-mlperf-inference-results-dir']...` - - CM script: [get-mlperf-inference-results-dir](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-results-dir) - * install,pip-package,for-cmind-python,_package.tabulate - - CM script: [install-pip-package-for-cmind-python](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pip-package-for-cmind-python) - * get,mlperf,inference,utils - - CM script: [get-mlperf-inference-utils](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-utils) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-app/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-app/_cm.yaml) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-app/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-app/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-mlperf-inference-app/_cm.yaml) - -___ -### Script output -`cmr "run-mlperf,inference [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-nvidia/index.md b/docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-nvidia/index.md deleted file mode 100644 index baf219a06..000000000 --- a/docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-nvidia/index.md +++ /dev/null @@ -1,242 +0,0 @@ -Automatically generated README for this automation recipe: **app-mlperf-training-nvidia** - -Category: **Modular MLPerf training benchmark pipeline** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-training-nvidia,1e2e357618cc4674) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *app,vision,language,mlcommons,mlperf,training,nvidia* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "app vision language mlcommons mlperf training nvidia" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=app,vision,language,mlcommons,mlperf,training,nvidia` - -`cm run script --tags=app,vision,language,mlcommons,mlperf,training,nvidia[,variations] [--input_flags]` - -*or* - -`cmr "app vision language mlcommons mlperf training nvidia"` - -`cmr "app vision language mlcommons mlperf training nvidia [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'app,vision,language,mlcommons,mlperf,training,nvidia', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="app,vision,language,mlcommons,mlperf,training,nvidia"``` - -#### Run this script via Docker (beta) - -`cm docker script "app vision language mlcommons mlperf training nvidia[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_bert` - - Environment variables: - - *CM_MLPERF_MODEL*: `bert` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_protobuf - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['tf', 'tflite']}` - * CM names: `--adr.['protobuf']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torch - * CM names: `--adr.['ml-engine-pytorch']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - -
    - - - * Group "**device**" -
    - Click here to expand this section. - - * **`_cuda`** (default) - - Environment variables: - - *CM_MLPERF_DEVICE*: `cuda` - - *USE_CUDA*: `True` - - Workflow: - * `_tpu` - - Environment variables: - - *CM_MLPERF_DEVICE*: `tpu` - - *CUDA_VISIBLE_DEVICES*: `` - - *USE_CUDA*: `False` - - Workflow: - -
    - - - * Group "**framework**" -
    - Click here to expand this section. - - * `_pytorch` - - Environment variables: - - *CM_MLPERF_BACKEND*: `pytorch` - - *CM_MLPERF_BACKEND_VERSION*: `<<>>` - - Workflow: - * `_tf` - - Aliases: `_tensorflow` - - Environment variables: - - *CM_MLPERF_BACKEND*: `tf` - - *CM_MLPERF_BACKEND_VERSION*: `<<>>` - - Workflow: - -
    - - -#### Default variations - -`_cuda` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--clean=value` → `CM_MLPERF_CLEAN_SUBMISSION_DIR=value` -* `--docker=value` → `CM_RUN_DOCKER_CONTAINER=value` -* `--hw_name=value` → `CM_HW_NAME=value` -* `--model=value` → `CM_MLPERF_CUSTOM_MODEL_PATH=value` -* `--num_threads=value` → `CM_NUM_THREADS=value` -* `--output_dir=value` → `OUTPUT_BASE_DIR=value` -* `--rerun=value` → `CM_RERUN=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "clean":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `nvidia` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,python - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,mlperf,training,src - * CM names: `--adr.['training-src', 'mlperf-training-src']...` - - CM script: [get-mlperf-training-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-src) - * get,git,repo,_repo.https://github.com/mlcommons/training_results_v2.1 - * CM names: `--adr.['training-results', 'mlperf-training-results']...` - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - * get,cuda - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_DEVICE': ['cuda']}` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * get,generic-python-lib,_torchvision_cuda - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['pytorch'], 'CM_MLPERF_DEVICE': ['cuda']}` - * CM names: `--adr.['ml-engine-torchvision']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_mlperf_logging - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * prepare,mlperf,training,data,bert,_nvidia - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_MODEL': ['bert']}` - * CM names: `--adr.['prepare-data', 'bert-model']...` - - CM script: [prepare-training-data-bert](https://github.com/mlcommons/cm4mlops/tree/master/script/prepare-training-data-bert) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia/_cm.yaml) - 1. ***Run native script if exists*** - * [run-bert-training.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia/run-bert-training.sh) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-nvidia/_cm.yaml) - -___ -### Script output -`cmr "app vision language mlcommons mlperf training nvidia [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_DATASET_*` -* `CM_HW_NAME` -* `CM_MLPERF_*` -* `CM_ML_MODEL_*` -#### New environment keys auto-detected from customize diff --git a/docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-reference/index.md b/docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-reference/index.md deleted file mode 100644 index 91149ed22..000000000 --- a/docs/Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-reference/index.md +++ /dev/null @@ -1,240 +0,0 @@ -Automatically generated README for this automation recipe: **app-mlperf-training-reference** - -Category: **Modular MLPerf training benchmark pipeline** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-training-reference,0c4b11bdcf494b4f) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *app,vision,language,mlcommons,mlperf,training,reference,ref* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "app vision language mlcommons mlperf training reference ref" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=app,vision,language,mlcommons,mlperf,training,reference,ref` - -`cm run script --tags=app,vision,language,mlcommons,mlperf,training,reference,ref[,variations] [--input_flags]` - -*or* - -`cmr "app vision language mlcommons mlperf training reference ref"` - -`cmr "app vision language mlcommons mlperf training reference ref [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'app,vision,language,mlcommons,mlperf,training,reference,ref', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="app,vision,language,mlcommons,mlperf,training,reference,ref"``` - -#### Run this script via Docker (beta) - -`cm docker script "app vision language mlcommons mlperf training reference ref[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_bert` - - Environment variables: - - *CM_MLPERF_MODEL*: `bert` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_protobuf - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['tf', 'tflite']}` - * CM names: `--adr.['protobuf']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torch - * CM names: `--adr.['ml-engine-pytorch']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - -
    - - - * Group "**device**" -
    - Click here to expand this section. - - * **`_cuda`** (default) - - Environment variables: - - *CM_MLPERF_DEVICE*: `cuda` - - *USE_CUDA*: `True` - - Workflow: - * `_tpu` - - Environment variables: - - *CM_MLPERF_DEVICE*: `tpu` - - *CUDA_VISIBLE_DEVICES*: `` - - *USE_CUDA*: `False` - - Workflow: - -
    - - - * Group "**framework**" -
    - Click here to expand this section. - - * `_pytorch` - - Environment variables: - - *CM_MLPERF_BACKEND*: `pytorch` - - *CM_MLPERF_BACKEND_VERSION*: `<<>>` - - Workflow: - * `_tf` - - Aliases: `_tensorflow` - - Environment variables: - - *CM_MLPERF_BACKEND*: `tf` - - *CM_MLPERF_BACKEND_VERSION*: `<<>>` - - Workflow: - -
    - - -#### Default variations - -`_cuda` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--clean=value` → `CM_MLPERF_CLEAN_SUBMISSION_DIR=value` -* `--docker=value` → `CM_RUN_DOCKER_CONTAINER=value` -* `--hw_name=value` → `CM_HW_NAME=value` -* `--model=value` → `CM_MLPERF_CUSTOM_MODEL_PATH=value` -* `--num_threads=value` → `CM_NUM_THREADS=value` -* `--output_dir=value` → `OUTPUT_BASE_DIR=value` -* `--rerun=value` → `CM_RERUN=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "clean":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `reference` -* CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX: `` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,python - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,mlperf,training,src - * CM names: `--adr.['training-src']...` - - CM script: [get-mlperf-training-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-src) - * get,cuda - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_DEVICE': ['cuda']}` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * get,generic-python-lib,_torchvision_cuda - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_BACKEND': ['pytorch'], 'CM_MLPERF_DEVICE': ['cuda']}` - * CM names: `--adr.['ml-engine-torchvision']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_mlperf_logging - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * prepare,mlperf,training,data,bert,_reference - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_MODEL': ['bert']}` - * CM names: `--adr.['prepare-data', 'bert-model']...` - - CM script: [prepare-training-data-bert](https://github.com/mlcommons/cm4mlops/tree/master/script/prepare-training-data-bert) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference/_cm.yaml) - 1. ***Run native script if exists*** - * [run-bert-training.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference/run-bert-training.sh) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-training-reference/_cm.yaml) - -___ -### Script output -`cmr "app vision language mlcommons mlperf training reference ref [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_DATASET_*` -* `CM_HW_NAME` -* `CM_MLPERF_*` -* `CM_ML_MODEL_*` -#### New environment keys auto-detected from customize diff --git a/docs/Modular-MLPerf-training-benchmark-pipeline/index.md b/docs/Modular-MLPerf-training-benchmark-pipeline/index.md new file mode 100644 index 000000000..31fcc33fc --- /dev/null +++ b/docs/Modular-MLPerf-training-benchmark-pipeline/index.md @@ -0,0 +1,4 @@ +The Modular MLPerf training benchmark pipeline category contains the following scripts: + +- [app-mlperf-training-nvidia](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/app-mlperf-training-nvidia/README.md) +- [app-mlperf-training-reference](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/app-mlperf-training-reference/README.md) diff --git a/docs/Modular-application-pipeline/app-image-corner-detection/index.md b/docs/Modular-application-pipeline/app-image-corner-detection/index.md deleted file mode 100644 index 933030b4f..000000000 --- a/docs/Modular-application-pipeline/app-image-corner-detection/index.md +++ /dev/null @@ -1,129 +0,0 @@ -Automatically generated README for this automation recipe: **app-image-corner-detection** - -Category: **Modular application pipeline** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-image-corner-detection,998ffee0bc534d0a) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-corner-detection)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *app,image,corner-detection* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "app image corner-detection" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=app,image,corner-detection` - -`cm run script --tags=app,image,corner-detection ` - -*or* - -`cmr "app image corner-detection"` - -`cmr "app image corner-detection " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'app,image,corner-detection', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="app,image,corner-detection"``` - -#### Run this script via Docker (beta) - -`cm docker script "app image corner-detection" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-corner-detection/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-corner-detection/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-corner-detection/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-corner-detection/run.sh) - 1. ***Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-corner-detection/_cm.json)*** - * compile,cpp-program - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_SKIP_COMPILE': ['on']}` - - CM script: [compile-program](https://github.com/mlcommons/cm4mlops/tree/master/script/compile-program) - * benchmark-program - * Skip this dependency only if all ENV vars are set:
    -`{'CM_SKIP_RUN': ['on']}` - - CM script: [benchmark-program](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-corner-detection/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-image-corner-detection/_cm.json) - -___ -### Script output -`cmr "app image corner-detection " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Modular-application-pipeline/index.md b/docs/Modular-application-pipeline/index.md new file mode 100644 index 000000000..317764859 --- /dev/null +++ b/docs/Modular-application-pipeline/index.md @@ -0,0 +1,3 @@ +The Modular application pipeline category contains the following scripts: + +- [app-image-corner-detection](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/app-image-corner-detection/README.md) diff --git a/docs/Platform-information/detect-cpu/index.md b/docs/Platform-information/detect-cpu/index.md deleted file mode 100644 index 353ee6d4b..000000000 --- a/docs/Platform-information/detect-cpu/index.md +++ /dev/null @@ -1,128 +0,0 @@ -Automatically generated README for this automation recipe: **detect-cpu** - -Category: **Platform information** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=detect-cpu,586c8a43320142f7) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *detect,cpu,detect-cpu,info* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "detect cpu detect-cpu info" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=detect,cpu,detect-cpu,info` - -`cm run script --tags=detect,cpu,detect-cpu,info ` - -*or* - -`cmr "detect cpu detect-cpu info"` - -`cmr "detect cpu detect-cpu info " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'detect,cpu,detect-cpu,info', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="detect,cpu,detect-cpu,info"``` - -#### Run this script via Docker (beta) - -`cm docker script "detect cpu detect-cpu info" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-cpu/_cm.json) - -___ -### Script output -`cmr "detect cpu detect-cpu info " -j` -#### New environment keys (filter) - -* `CM_HOST_CPU_*` -* `CM_HOST_DISK_CAPACITY` -* `CM_HOST_MEMORY_CAPACITY` -#### New environment keys auto-detected from customize - -* `CM_HOST_CPU_PHYSICAL_CORES_PER_SOCKET` -* `CM_HOST_CPU_SOCKETS` -* `CM_HOST_CPU_THREADS_PER_CORE` -* `CM_HOST_CPU_TOTAL_LOGICAL_CORES` \ No newline at end of file diff --git a/docs/Platform-information/detect-os/index.md b/docs/Platform-information/detect-os/index.md deleted file mode 100644 index 07061659a..000000000 --- a/docs/Platform-information/detect-os/index.md +++ /dev/null @@ -1,138 +0,0 @@ -Automatically generated README for this automation recipe: **detect-os** - -Category: **Platform information** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=detect-os,863735b7db8c44fc) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *detect-os,detect,os,info* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "detect-os detect os info" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=detect-os,detect,os,info` - -`cm run script --tags=detect-os,detect,os,info ` - -*or* - -`cmr "detect-os detect os info"` - -`cmr "detect-os detect os info " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'detect-os,detect,os,info' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="detect-os,detect,os,info"``` - -#### Run this script via Docker (beta) - -`cm docker script "detect-os detect os info" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os/customize.py)*** - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/detect-os/_cm.json)*** - * get,sys-utils-min - * Enable this dependency only if all ENV vars are set:
    -`{'CM_HOST_OS_TYPE': ['windows']}` - - CM script: [get-sys-utils-min](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-min) - -___ -### Script output -`cmr "detect-os detect os info " -j` -#### New environment keys (filter) - -* `+CM_HOST_OS_*` -* `+PATH` -* `CM_HOST_OS_*` -* `CM_HOST_PLATFORM_*` -* `CM_HOST_PYTHON_*` -* `CM_HOST_SYSTEM_NAME` -* `CM_RUN_STATE_DOCKER` -#### New environment keys auto-detected from customize - -* `CM_HOST_OS_BITS` -* `CM_HOST_OS_MACHINE` -* `CM_HOST_OS_PACKAGE_MANAGER` -* `CM_HOST_OS_PACKAGE_MANAGER_INSTALL_CMD` -* `CM_HOST_OS_PACKAGE_MANAGER_UPDATE_CMD` -* `CM_HOST_OS_TYPE` -* `CM_HOST_PYTHON_BITS` -* `CM_HOST_SYSTEM_NAME` \ No newline at end of file diff --git a/docs/Platform-information/index.md b/docs/Platform-information/index.md new file mode 100644 index 000000000..b19dfb152 --- /dev/null +++ b/docs/Platform-information/index.md @@ -0,0 +1,4 @@ +The Platform information category contains the following scripts: + +- [detect-cpu](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/detect-cpu/README.md) +- [detect-os](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/detect-os/README.md) diff --git a/docs/Python-automation/activate-python-venv/index.md b/docs/Python-automation/activate-python-venv/index.md deleted file mode 100644 index f2d9f47bf..000000000 --- a/docs/Python-automation/activate-python-venv/index.md +++ /dev/null @@ -1,121 +0,0 @@ -Automatically generated README for this automation recipe: **activate-python-venv** - -Category: **Python automation** - -License: **Apache 2.0** - -Developers: [Grigori Fursin](https://cKnowledge.org/gfursin) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=activate-python-venv,fcbbb84946f34c55) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/activate-python-venv)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *activate,python-venv* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "activate python-venv" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=activate,python-venv` - -`cm run script --tags=activate,python-venv ` - -*or* - -`cmr "activate python-venv"` - -`cmr "activate python-venv " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'activate,python-venv' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="activate,python-venv"``` - -#### Run this script via Docker (beta) - -`cm docker script "activate python-venv" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/activate-python-venv/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/activate-python-venv/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/activate-python-venv/_cm.json)*** - * install,python-venv - * CM names: `--adr.['python-venv']...` - - CM script: [install-python-venv](https://github.com/mlcommons/cm4mlops/tree/master/script/install-python-venv) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/activate-python-venv/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/activate-python-venv/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/activate-python-venv/_cm.json) - 1. Run "postrocess" function from customize.py - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/activate-python-venv/_cm.json) - -___ -### Script output -`cmr "activate python-venv " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Python-automation/get-generic-python-lib/index.md b/docs/Python-automation/get-generic-python-lib/index.md deleted file mode 100644 index ce3f9525c..000000000 --- a/docs/Python-automation/get-generic-python-lib/index.md +++ /dev/null @@ -1,681 +0,0 @@ -Automatically generated README for this automation recipe: **get-generic-python-lib** - -Category: **Python automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-generic-python-lib,94b62a682bc44791) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,generic-python-lib* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get generic-python-lib" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,generic-python-lib` - -`cm run script --tags=get,generic-python-lib[,variations] [--input_flags]` - -*or* - -`cmr "get generic-python-lib"` - -`cmr "get generic-python-lib [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,generic-python-lib' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,generic-python-lib"``` - -#### Run this script via Docker (beta) - -`cm docker script "get generic-python-lib[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_Pillow` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `Pillow` - - Workflow: - * `_apache-tvm` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `apache-tvm` - - *CM_GENERIC_PYTHON_PIP_EXTRA*: ` --pre` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_typing_extensions - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_apex` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `apex` - - Workflow: - * `_async_timeout` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `async_timeout` - - Workflow: - * `_attr` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `attr` - - Workflow: - * `_attrs` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `attrs` - - Workflow: - * `_boto3` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `boto3` - - Workflow: - * `_cloudpickle` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `cloudpickle` - - Workflow: - * `_cmind` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `cmind` - - Workflow: - * `_colored` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `colored` - - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL*: `https://pypi.ngc.nvidia.com` - - Workflow: - * `_conda.#` - - Workflow: - * `_cupy` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `cupy` - - Workflow: - 1. 
***Read "deps" on other CM scripts*** - * get,cuda - * CM names: `--adr.['cuda']...` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * `_custom-python` - - Environment variables: - - *CM_TMP_USE_CUSTOM_PYTHON*: `on` - - Workflow: - * `_datasets` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `datasets` - - Workflow: - * `_decorator` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `decorator` - - Workflow: - * `_deepsparse` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `deepsparse` - - Workflow: - * `_dllogger` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `dllogger` - - *CM_GENERIC_PYTHON_PIP_URL*: `git+https://github.com/NVIDIA/dllogger#egg=dllogger` - - Workflow: - * `_fiftyone` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `fiftyone` - - Workflow: - * `_google-api-python-client` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `google_api_python_client` - - Workflow: - * `_google-auth-oauthlib` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `google_auth_oauthlib` - - Workflow: - * `_huggingface_hub` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `huggingface_hub` - - Workflow: - * `_inflect` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `inflect` - - Workflow: - * `_jax` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `jax` - - Workflow: - * `_jax_cuda` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `jax[cuda]` - - *CM_GENERIC_PYTHON_PIP_EXTRA*: `-f https://storage.googleapis.com/jax-releases/jax_cuda_releases.html` - - *CM_JAX_VERSION_EXTRA*: `CUDA` - - Workflow: - 1. 
***Read "deps" on other CM scripts*** - * get,cuda - * CM names: `--adr.['cuda']...` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * `_librosa` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `librosa` - - Workflow: - * `_matplotlib` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `matplotlib` - - Workflow: - * `_mlperf_loadgen` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `mlperf_loadgen` - - *CM_GENERIC_PYTHON_PIP_URL*: `git+https://github.com/mlcommons/inference.git#subdirectory=loadgen` - - Workflow: - * `_mlperf_logging` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `mlperf_logging` - - *CM_GENERIC_PYTHON_PIP_URL*: `git+https://github.com/mlperf/logging.git` - - Workflow: - * `_mpld3` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `mpld3` - - Workflow: - * `_nibabel` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `nibabel` - - Workflow: - * `_numpy` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `numpy` - - Workflow: - * `_nvidia-apex` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `apex` - - *CM_GENERIC_PYTHON_PACKAGE_VARIANT*: `nvidia-apex` - - *CM_GENERIC_PYTHON_PIP_URL*: `git+https://github.com/nvidia/apex@0da3ffb92ee6fbe5336602f0e3989db1cd16f880` - - Workflow: - * `_nvidia-apex-from-src` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `apex` - - *CM_GENERIC_PYTHON_PACKAGE_VARIANT*: `nvidia-apex` - - Workflow: - 1. 
***Read "deps" on other CM scripts*** - * get,cuda - * CM names: `--adr.['cuda']...` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * get,generic-python-lib,_torch_cuda - * CM names: `--adr.['torch']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,git,repo,_repo.https://github.com/NVIDIA/apex,_tag.23.05 - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - * `_nvidia-dali` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `nvidia-dali-cuda120` - - *CM_GENERIC_PYTHON_PIP_EXTRA*: ` --upgrade --default-timeout=900` - - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL*: `https://developer.download.nvidia.com/compute/redist` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,cuda - * CM names: `--adr.['cuda']...` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * `_nvidia-pycocotools` - - Environment variables: - - *CM_GENERIC_PYTHON_PIP_UNINSTALL_DEPS*: `pycocotools` - - *CM_GENERIC_PYTHON_PIP_URL*: `pycocotools@git+https://github.com/NVIDIA/cocoapi#subdirectory=PythonAPI` - - Workflow: - 1. 
***Read "deps" on other CM scripts*** - * get,generic-python-lib,_package.cython - * CM names: `--adr.['cython']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.numpy - * CM names: `--adr.['numpy']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_nvidia-pyindex` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `nvidia-pyindex` - - Workflow: - * `_nvidia-tensorrt` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `nvidia-tensorrt` - - Workflow: - * `_onnx` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `onnx` - - Workflow: - * `_onnx-graphsurgeon` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `onnx_graphsurgeon` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_package.nvidia-pyindex - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_onnxruntime` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `onnxruntime` - - Workflow: - * `_onnxruntime,rocm` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `onnxruntime-training` - - *CM_GENERIC_PYTHON_PIP_URL*: `https://download.onnxruntime.ai/onnxruntime_training-1.16.0%2Brocm56-cp3<<>>-cp3<<>>-manylinux_2_17_x86_64.manylinux2014_x86_64.whl` - - Workflow: - * `_onnxruntime_gpu` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `onnxruntime_gpu` - - *CM_ONNXRUNTIME_VERSION_EXTRA*: `GPU` - - Workflow: - 1. 
***Read "deps" on other CM scripts*** - * get,cuda - * CM names: `--adr.['cuda']...` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * `_opencv-python` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `opencv-python` - - Workflow: - * `_package.#` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `#` - - *CM_GENERIC_PYTHON_PIP_UNINSTALL_DEPS*: `` - - *CM_GENERIC_PYTHON_PIP_URL*: `` - - Workflow: - * `_pandas` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `pandas` - - Workflow: - * `_path.#` - - Environment variables: - - *CM_GENERIC_PYTHON_PIP_URL*: `#` - - Workflow: - * `_pillow` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `Pillow` - - Workflow: - * `_pip` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `pip` - - Workflow: - * `_polygraphy` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `polygraphy` - - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL*: `https://pypi.ngc.nvidia.com` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_colored - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_pre` - - Environment variables: - - *CM_GENERIC_PYTHON_DEV_VERSION*: `yes` - - Workflow: - * `_protobuf` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `protobuf` - - Workflow: - * `_psutil` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `psutil` - - Workflow: - * `_pycocotools` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `pycocotools` - - Workflow: - * `_pycuda` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `pycuda` - - Workflow: - 1. 
***Read "deps" on other CM scripts*** - * get,cuda - * CM names: `--adr.['cuda']...` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * `_ray` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `ray[default]` - - Workflow: - * `_requests` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `requests` - - Workflow: - * `_rocm` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,rocm - * CM names: `--adr.['rocm']...` - - CM script: [get-rocm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-rocm) - * `_safetensors` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `safetensors` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,rust-compiler - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_HOST_PLATFORM_FLAVOR': ['x86_64']}` - - CM script: [get-compiler-rust](https://github.com/mlcommons/cm4mlops/tree/master/script/get-compiler-rust) - * `_scikit-learn` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `scikit-learn` - - Workflow: - * `_scipy` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `scipy` - - Workflow: - * `_scons` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `scons` - - Workflow: - * `_setfit` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `setfit` - - Workflow: - * `_setuptools` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `setuptools` - - Workflow: - * `_six` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `six` - - Workflow: - * `_sklearn` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `sklearn` - - Workflow: - * `_sox` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `sox` - - Workflow: - * `_sparsezoo` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `sparsezoo` - - Workflow: - * `_streamlit` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `streamlit` - - Workflow: - * `_streamlit_option_menu` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `streamlit_option_menu` - - Workflow: - * `_tensorboard` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tensorboard` - - Workflow: - * `_tensorflow` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tensorflow` - - Workflow: - * `_tensorflow,rocm` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tensorflow-rocm` - - Workflow: - * `_tensorrt` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tensorrt` - - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL*: `https://download.pytorch.org/whl/<<>>` - - *CM_TORCH_VERSION_EXTRA*: `CUDA` - - Workflow: - * `_tflite` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tflite` - - Workflow: 
- * `_tflite-runtime` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tflite-runtime` - - Workflow: - * `_tokenization` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tokenization` - - Workflow: - * `_toml` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `toml` - - Workflow: - * `_torch` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torch` - - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL*: `https://download.pytorch.org/whl/cpu` - - Workflow: - * `_torch,pre` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torch` - - *CM_GENERIC_PYTHON_PIP_EXTRA*: ` --pre` - - *CM_GENERIC_PYTHON_PIP_INDEX_URL*: `https://download.pytorch.org/whl/nightly/cpu` - - Workflow: - * `_torch,rocm` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torch` - - *CM_GENERIC_PYTHON_PIP_INDEX_URL*: `https://download.pytorch.org/whl/rocm5.6` - - *CM_GENERIC_PYTHON_PIP_UNINSTALL_DEPS*: `torch` - - Workflow: - 1. ***Read "post_deps" on other CM scripts*** - * get,generic-python-lib,_torchvision,_rocm - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torchaudio,_rocm - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_torch_cuda` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torch` - - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL1*: `https://download.pytorch.org/whl/<<>>` - - *CM_TORCH_VERSION_EXTRA*: `CUDA` - - Workflow: - 1. 
***Read "deps" on other CM scripts*** - * get,cuda - * CM names: `--adr.['cuda']...` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * `_torch_cuda,pre` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torch` - - *CM_GENERIC_PYTHON_PIP_EXTRA*: ` --pre` - - *CM_GENERIC_PYTHON_PIP_INDEX_URL*: `https://download.pytorch.org/whl/<<>>` - - *CM_TORCH_VERSION_EXTRA*: `CUDA` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,cuda - * CM names: `--adr.['cuda']...` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * get,generic-python-lib,_numpy - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_torch_tensorrt` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torch-tensorrt` - - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL*: `https://download.pytorch.org/whl/<<>>` - - *CM_TORCH_VERSION_EXTRA*: `CUDA` - - Workflow: - * `_torchaudio` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torchaudio` - - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL*: `https://download.pytorch.org/whl/cpu` - - Workflow: - * `_torchaudio,rocm` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torchaudio` - - *CM_GENERIC_PYTHON_PIP_INDEX_URL*: `https://download.pytorch.org/whl/rocm5.6` - - *CM_GENERIC_PYTHON_PIP_UNINSTALL_DEPS*: `torchaudio` - - Workflow: - * `_torchaudio_cuda` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torchaudio` - - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL1*: `https://download.pytorch.org/whl/<<>>` - - *CM_TORCHAUDIO_VERSION_EXTRA*: `CUDA` - - Workflow: - 1. 
***Read "deps" on other CM scripts*** - * get,cuda - * CM names: `--adr.['cuda']...` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * `_torchvision` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torchvision` - - *CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL*: `https://download.pytorch.org/whl/cpu` - - Workflow: - * `_torchvision,rocm` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torchvision` - - *CM_GENERIC_PYTHON_PIP_INDEX_URL*: `https://download.pytorch.org/whl/rocm5.6` - - *CM_GENERIC_PYTHON_PIP_UNINSTALL_DEPS*: `torchvision` - - Workflow: - * `_torchvision_cuda` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `torchvision` - - *CM_TORCHVISION_VERSION_EXTRA*: `CUDA` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,cuda - * CM names: `--adr.['cuda']...` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * `_tornado` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tornado` - - Workflow: - * `_tqdm` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `tqdm` - - Workflow: - * `_transformers` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `transformers` - - Workflow: - * `_typing_extensions` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `typing_extensions` - - Workflow: - * `_ujson` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `ujson` - - Workflow: - * `_unidecode` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `unidecode` - - Workflow: - * `_url.#` - - Environment variables: - - *CM_GENERIC_PYTHON_PIP_URL*: `#` - - *CM_TMP_PYTHON_PACKAGE_FORCE_INSTALL*: `yes` - - Workflow: - * `_wandb` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `wandb` - - Workflow: - * `_west` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `west` - - Workflow: - * `_xgboost` - - Environment 
variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `xgboost` - - Workflow: - * `_xlsxwriter` - - Environment variables: - - *CM_GENERIC_PYTHON_PACKAGE_NAME*: `xlsxwriter` - - Workflow: - -
    - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--extra_index_url=value` → `CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL=value` -* `--force_install=value` → `CM_TMP_PYTHON_PACKAGE_FORCE_INSTALL=value` -* `--index_url=value` → `CM_GENERIC_PYTHON_PIP_INDEX_URL=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "extra_index_url":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,python3 - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_TMP_USE_CUSTOM_PYTHON': ['on']}` - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,generic-python-lib,_pip - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_GENERIC_PYTHON_PACKAGE_NAME': ['pip']}` - * CM names: `--adr.['python-pip', 'pip']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib/_cm.json)*** - * install,onnxruntime,from.src,_cuda - * Enable this dependency only if all ENV vars are set:
    -`{'CM_INSTALL_ONNXRUNTIME_GPU_FROM_SRC': ['yes']}` - - CM script: [install-onnxruntime-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-onnxruntime-from-src) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-generic-python-lib/_cm.json) - -___ -### Script output -`cmr "get generic-python-lib [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_PYTHONLIB_*` -#### New environment keys auto-detected from customize diff --git a/docs/Python-automation/get-python3/index.md b/docs/Python-automation/get-python3/index.md deleted file mode 100644 index 2a011ed3c..000000000 --- a/docs/Python-automation/get-python3/index.md +++ /dev/null @@ -1,169 +0,0 @@ -Automatically generated README for this automation recipe: **get-python3** - -Category: **Python automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-python3,d0b5dd74373f4a62) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,python,python3,get-python,get-python3* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get python python3 get-python get-python3" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,python,python3,get-python,get-python3` - -`cm run script --tags=get,python,python3,get-python,get-python3[,variations] ` - -*or* - -`cmr "get python python3 get-python get-python3"` - -`cmr "get python python3 get-python get-python3 [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,python,python3,get-python,get-python3' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,python,python3,get-python,get-python3"``` - -#### Run this script via Docker (beta) - -`cm docker script "get python python3 get-python get-python3[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_conda.#` - - Environment variables: - - *CM_PYTHON_CONDA*: `yes` - - *CM_PYTHON_INSTALL_CACHE_TAGS*: `_conda.#` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic,conda-package,_name.#,_package.python - * CM names: `--adr.['conda-package', 'conda-python']...` - - CM script: [install-generic-conda-package](https://github.com/mlcommons/cm4mlops/tree/master/script/install-generic-conda-package) - * `_custom-path.#` - - Environment variables: - - *CM_PYTHON_BIN_WITH_PATH*: `#` - - Workflow: - * `_lto` - - Workflow: - * `_optimized` - - Workflow: - * `_shared` - - Workflow: - * `_with-custom-ssl` - - Workflow: - * `_with-ssl` - - Workflow: - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3/_cm.json)*** - * install,python,src - * Enable this dependency only if all ENV vars are set:
    -`{'CM_REQUIRE_INSTALL': ['yes']}` - - CM script: [install-python-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-python-src) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-python3/_cm.json) - -___ -### Script output -`cmr "get python python3 get-python get-python3 [,variations]" -j` -#### New environment keys (filter) - -* `+C_INCLUDE_PATH` -* `+LD_LIBRARY_PATH` -* `+PATH` -* `CM_PYTHON_*` -#### New environment keys auto-detected from customize - -* `CM_PYTHON_BIN` -* `CM_PYTHON_BIN_PATH` -* `CM_PYTHON_BIN_WITH_PATH` -* `CM_PYTHON_CACHE_TAGS` -* `CM_PYTHON_MAJOR_VERSION` -* `CM_PYTHON_MINOR_VERSION` -* `CM_PYTHON_PATCH_VERSION` \ No newline at end of file diff --git a/docs/Python-automation/index.md b/docs/Python-automation/index.md new file mode 100644 index 000000000..37e65aae9 --- /dev/null +++ b/docs/Python-automation/index.md @@ -0,0 +1,8 @@ +The Python automation category contains the following scripts: + +- [activate-python-venv](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/activate-python-venv/README.md) +- [get-generic-python-lib](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-generic-python-lib/README.md) +- [get-python3](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-python3/README.md) +- 
[install-generic-conda-package](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-generic-conda-package/README.md) +- [install-python-src](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-python-src/README.md) +- [install-python-venv](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/install-python-venv/README.md) diff --git a/docs/Python-automation/install-generic-conda-package/index.md b/docs/Python-automation/install-generic-conda-package/index.md deleted file mode 100644 index 6743ef900..000000000 --- a/docs/Python-automation/install-generic-conda-package/index.md +++ /dev/null @@ -1,158 +0,0 @@ -Automatically generated README for this automation recipe: **install-generic-conda-package** - -Category: **Python automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-generic-conda-package,d9275487f5314195) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-generic-conda-package)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,install,generic,generic-conda-lib,conda-lib,conda-package,generic-conda-package* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get install generic generic-conda-lib conda-lib conda-package generic-conda-package" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,install,generic,generic-conda-lib,conda-lib,conda-package,generic-conda-package` - -`cm run script --tags=get,install,generic,generic-conda-lib,conda-lib,conda-package,generic-conda-package[,variations] ` - -*or* - -`cmr "get install generic generic-conda-lib conda-lib conda-package generic-conda-package"` - -`cmr "get install generic generic-conda-lib conda-lib conda-package generic-conda-package [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,install,generic,generic-conda-lib,conda-lib,conda-package,generic-conda-package' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,install,generic,generic-conda-lib,conda-lib,conda-package,generic-conda-package"``` - -#### Run this script via Docker (beta) - -`cm docker script "get install generic generic-conda-lib conda-lib conda-package generic-conda-package[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_name.#` - - Workflow: - * `_package.#` - - Environment variables: - - *CM_CONDA_PKG_NAME*: `#` - - Workflow: - -
    - - - * Group "**package-source**" -
    - Click here to expand this section. - - * `_source.#` - - Environment variables: - - *CM_CONDA_PKG_SRC*: `#` - - Workflow: - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-generic-conda-package/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,conda - * CM names: `--adr.['conda']...` - - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) - * get,conda - * CM names: `--adr.['conda']...` - - CM script: [get-conda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-conda) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-generic-conda-package/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-generic-conda-package/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-generic-conda-package/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-generic-conda-package/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-generic-conda-package/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-generic-conda-package/_cm.json) - -___ -### Script output -`cmr "get install generic generic-conda-lib conda-lib conda-package generic-conda-package [,variations]" -j` -#### New environment keys (filter) - -* `CM_PYTHONLIB_*` -#### New environment keys auto-detected from customize diff --git a/docs/Python-automation/install-python-src/index.md b/docs/Python-automation/install-python-src/index.md deleted file mode 100644 index 1fd8e9eae..000000000 --- a/docs/Python-automation/install-python-src/index.md +++ /dev/null @@ -1,182 +0,0 @@ -Automatically generated README for this automation recipe: **install-python-src** - -Category: **Python automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-python-src,12d3a608afe14a1e) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-src)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *install,src,python,python3,src-python3,src-python* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install src python python3 src-python3 src-python" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,src,python,python3,src-python3,src-python` - -`cm run script --tags=install,src,python,python3,src-python3,src-python[,variations] ` - -*or* - -`cmr "install src python python3 src-python3 src-python"` - -`cmr "install src python python3 src-python3 src-python [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'install,src,python,python3,src-python3,src-python' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,src,python,python3,src-python3,src-python"``` - -#### Run this script via Docker (beta) - -`cm docker script "install src python python3 src-python3 src-python[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_lto` - - Environment variables: - - *CM_PYTHON_LTO_FLAG*: ` --lto` - - *CM_PYTHON_INSTALL_CACHE_TAGS*: `with-lto` - - Workflow: - * `_optimized` - - Environment variables: - - *CM_PYTHON_OPTIMIZATION_FLAG*: ` --enable-optimizations` - - *CM_PYTHON_INSTALL_CACHE_TAGS*: `optimized` - - Workflow: - * `_shared` - - Environment variables: - - *CM_PYTHON_INSTALL_CACHE_TAGS*: `shared` - - *CM_SHARED_BUILD*: `yes` - - Workflow: - * `_with-custom-ssl` - - Environment variables: - - *CM_CUSTOM_SSL*: `yes` - - *CM_PYTHON_INSTALL_CACHE_TAGS*: `with-custom-ssl` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,openssl - - CM script: [get-openssl](https://github.com/mlcommons/cm4mlops/tree/master/script/get-openssl) - * `_with-ssl` - - Environment variables: - - *CM_ENABLE_SSL*: `yes` - - *CM_PYTHON_INSTALL_CACHE_TAGS*: `with-ssl` - - Workflow: - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_ENABLE_SSL: `no` -* CM_CUSTOM_SSL: `no` -* CM_SHARED_BUILD: `no` -* CM_PYTHON_OPTIMIZATION_FLAG: `` -* CM_PYTHON_LTO_FLAG: `` -* CM_WGET_URL: `https://www.python.org/ftp/python/[PYTHON_VERSION]/Python-[PYTHON_VERSION].tgz` - -
    - -#### Versions -Default version: `3.10.13` - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-src/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-src/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-src/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-src/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-src/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-src/customize.py)*** - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-src/_cm.json)*** - * get,python3 - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_REQUIRE_INSTALL': ['yes']}` - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - -___ -### Script output -`cmr "install src python python3 src-python3 src-python [,variations]" -j` -#### New environment keys (filter) - -* `+C_INCLUDE_PATH` -* `+LD_LIBRARY_PATH` -* `+PATH` -* `CM_PYTHON_BIN_WITH_PATH` -* `CM_PYTHON_INSTALL_PATH` -#### New environment keys auto-detected from customize - -* `CM_PYTHON_BIN_WITH_PATH` \ No newline at end of file diff --git a/docs/Python-automation/install-python-venv/index.md b/docs/Python-automation/install-python-venv/index.md deleted file mode 100644 index 8b269d741..000000000 --- a/docs/Python-automation/install-python-venv/index.md +++ /dev/null @@ -1,152 +0,0 @@ -Automatically generated README for this automation recipe: **install-python-venv** - -Category: **Python automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=install-python-venv,7633ebada4584c6c) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *install,python,get-python-venv,python-venv* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "install python get-python-venv python-venv" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=install,python,get-python-venv,python-venv` - -`cm run script --tags=install,python,get-python-venv,python-venv[,variations] ` - -*or* - -`cmr "install python get-python-venv python-venv"` - -`cmr "install python get-python-venv python-venv [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'install,python,get-python-venv,python-venv' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="install,python,get-python-venv,python-venv"``` - -#### Run this script via Docker (beta) - -`cm docker script "install python get-python-venv python-venv[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_lto` - - Workflow: - * `_optimized` - - Workflow: - * `_shared` - - Workflow: - * `_with-custom-ssl` - - Workflow: - * `_with-ssl` - - Workflow: - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv/_cm.json)*** - * get,python,-virtual - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv/customize.py)*** - 1. 
***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/install-python-venv/_cm.json)*** - * get,python3 - * CM names: `--adr.['register-python']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - -___ -### Script output -`cmr "install python get-python-venv python-venv [,variations]" -j` -#### New environment keys (filter) - -* `CM_PYTHON_BIN_WITH_PATH` -* `CM_VIRTUAL_ENV_*` -#### New environment keys auto-detected from customize - -* `CM_PYTHON_BIN_WITH_PATH` -* `CM_VIRTUAL_ENV_DIR` -* `CM_VIRTUAL_ENV_PATH` -* `CM_VIRTUAL_ENV_SCRIPTS_PATH` \ No newline at end of file diff --git a/docs/Remote-automation/index.md b/docs/Remote-automation/index.md new file mode 100644 index 000000000..c02cdf115 --- /dev/null +++ b/docs/Remote-automation/index.md @@ -0,0 +1,3 @@ +The Remote automation category contains the following scripts: + +- [remote-run-commands](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/remote-run-commands/README.md) diff --git a/docs/Remote-automation/remote-run-commands/index.md b/docs/Remote-automation/remote-run-commands/index.md deleted file mode 100644 index 7782b7f6f..000000000 --- a/docs/Remote-automation/remote-run-commands/index.md +++ /dev/null @@ -1,145 +0,0 @@ -Automatically generated README for this automation recipe: **remote-run-commands** - -Category: **Remote automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=remote-run-commands,b71e24b03c9d49cd) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *remote,run,cmds,remote-run,remote-run-cmds,ssh-run,ssh* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "remote run cmds remote-run remote-run-cmds ssh-run ssh" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=remote,run,cmds,remote-run,remote-run-cmds,ssh-run,ssh` - -`cm run script --tags=remote,run,cmds,remote-run,remote-run-cmds,ssh-run,ssh [--input_flags]` - -*or* - -`cmr "remote run cmds remote-run remote-run-cmds ssh-run ssh"` - -`cmr "remote run cmds remote-run remote-run-cmds ssh-run ssh " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'remote,run,cmds,remote-run,remote-run-cmds,ssh-run,ssh' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="remote,run,cmds,remote-run,remote-run-cmds,ssh-run,ssh"``` - -#### Run this script via Docker (beta) - -`cm docker script "remote run cmds remote-run remote-run-cmds ssh-run ssh" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--client_refresh=value` → `CM_SSH_CLIENT_REFRESH=value` -* `--host=value` → `CM_SSH_HOST=value` -* `--password=value` → `CM_SSH_PASSWORD=value` -* `--port=value` → `CM_SSH_PORT=value` -* `--run_cmds=value` → `CM_SSH_RUN_COMMANDS=value` -* `--skip_host_verify=value` → `CM_SSH_SKIP_HOST_VERIFY=value` -* `--ssh_key_file=value` → `CM_SSH_KEY_FILE=value` -* `--user=value` → `CM_SSH_USER=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "client_refresh":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_SSH_PORT: `22` -* CM_SSH_HOST: `localhost` -* CM_SSH_USER: `$USER` -* CM_SSH_CLIENT_REFRESH: `10` -* CM_SSH_KEY_FILE: `$HOME/.ssh/id_rsa` - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/remote-run-commands/_cm.json) - -___ -### Script output -`cmr "remote run cmds remote-run remote-run-cmds ssh-run ssh " [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Reproduce-MLPerf-benchmarks/app-mlperf-inference-nvidia/index.md b/docs/Reproduce-MLPerf-benchmarks/app-mlperf-inference-nvidia/index.md deleted file mode 100644 index c7f83ff09..000000000 --- a/docs/Reproduce-MLPerf-benchmarks/app-mlperf-inference-nvidia/index.md +++ /dev/null @@ -1,1333 +0,0 @@ -Automatically generated README for this automation recipe: **app-mlperf-inference-nvidia** - -Category: **Reproduce MLPerf benchmarks** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this 
CM script](https://access.cknowledge.org/playground/?action=scripts&name=app-mlperf-inference-nvidia,bc3b17fb430f4732) ]* - ---- - -This script is a CM wrapper to the official [Nvidia submission code](https://github.com/mlcommons/inference_results_v3.0/tree/master/closed/NVIDIA) used for MLPerf inference submissions. - - - -## Download the needed files - -* Please ask privately in [this discord channel](https://discord.gg/y7hupJsUNb) if you would like to get access to an Amazon S3 bucket containing all the needed files for easiness. Otherwise, you can download them from the below links. - -For x86 machines, please download the latest install tar files from the below sites -1. [cuDNN](https://developer.nvidia.com/cudnn) (for cuda 11) -2. [TensorRT](https://developer.nvidia.com/tensorrt) -3. Imagenet validation set (unfortunately not available via public URL) following the instructions given [here](https://github.com/mlcommons/ck/blob/master/cm-mlops/script/get-dataset-imagenet-val/README-extra.md) - -
    - - - -## Using Docker (Recommended on x86 systems) - - -Assuming all the downloaded files are to the user home directory please do the following steps: - -1. Download CUDA 11.8 - ``` - wget https://developer.download.nvidia.com/compute/cuda/11.8.0/local_installers/cuda_11.8.0_520.61.05_linux.run - ``` -2. [Install docker](https://docs.docker.com/engine/install/) and [Nvidia container toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html) - -3. Give docker permission to the current user - ``` - sudo usermod -aG docker $USER - ``` - Logout and login - Restart docker if required and confirm that Nvidia container toolkit is working by - ``` - nvidia-ctk --version - ``` -4. Check if Nvidia driver is working properly on the host. - ``` - nvidia-smi - ``` - If the above command produces any error you'll need to install Nvidia drivers on the host. You can do this via CM if you have sudo access - ``` - cmr "install cuda prebuilt _driver" --version=11.8.0 - ``` -5. Build the docker container and mount the paths from the host machine. 
- ** You may want to change the `scratch_path` location as it can take 100s of GBs.** - ```bash - cm docker script --tags=build,nvidia,inference,server \ - --cuda_run_file_path=$HOME/cuda_11.8.0_520.61.05_linux.run \ - --tensorrt_tar_file_path=$HOME/TensorRT-8.6.1.6.Linux.x86_64-gnu.cuda-11.8.tar.gz \ - --cudnn_tar_file_path=$HOME/cudnn-linux-x86_64-8.9.2.26_cuda11-archive.tar.xz \ - --imagenet_path=$HOME/imagenet-2012-val \ - --scratch_path=$HOME/mlperf_scratch \ - --docker_cm_repo=mlcommons@cm4mlops \ - --results_dir=$HOME/results_dir \ - --submission_dir=$HOME/submission_dir \ - --adr.compiler.tags=gcc - ``` - * Use `--docker_cache=no` to turn off docker caching - * Use `--docker_run_cmd_prefix="cm pull repo mlcommons@cm4mlops"` to update the CK repository when docker caching is used - * Use `--custom_system=no` if you are using a similar system to the [Nvidia submission systems for MLPerf inference 3.0](https://github.com/mlcommons/inference_results_v3.0/tree/main/closed/NVIDIA/systems). - -6. At the end of the build you'll get the following prompt unless you have chosen `--custom_system=no`. Please give a system name and say yes to generating the configuration files - ### Example output - ``` - ============================================ - => A system ID is a string containing only letters, numbers, and underscores - => that is used as the human-readable name of the system. It is also used as - => the system name when creating the measurements/ and results/ entries. - => This string should also start with a letter to be a valid Python enum member name. - => Specify the system ID to use for the current system: phoenix - => Reloaded system list. MATCHED_SYSTEM: KnownSystem.phoenix - => This script will generate Benchmark Configuration stubs for the detected system. - Continue? [y/n]: y - ``` - Now you'll be inside the CM Nvidia docker container and can run further scripts. - -7. 
Once the build is complete, you can proceed with any further CM scripts like for MLPerf inference. You can also save the container at this stage using [docker commit](https://docs.docker.com/engine/reference/commandline/commit/) so that it can be launched later without having to go through the previous steps. - -
    - -
    - - - -## Without Docker - - -1. Install CUDA - If CUDA is not detected, CM should download and install it automatically when you run the workflow. - ** Nvidia drivers are expected to be installed on the system ** - -2. Install cuDNN - ```bash - cmr "get cudnn" --tar_file= - ``` -3. Install TensorRT - ```bash - cmr "get tensorrt _dev" --tar_file= - ``` - On non x86 systems like Nvidia Orin, you can do a package manager install and then CM should pick up the installation automatically during the workflow run. - -4. Build the Nvidia inference server - ``` - cmr "build nvidia inference server" \ - --adr.install-cuda-prebuilt.local_run_file_path=/data/cuda_11.8.0_520.61.05_linux.run \ - --adr.tensorrt.tar_file=/data/TensorRT-8.6.1.6.Linux.x86_64-gnu.cuda-11.8.tar.gz \ - --adr.cudnn.tar_file=/data/cudnn-linux-x86_64-8.9.2.26_cuda11-archive.tar.xz \ - --adr.compiler.tags=gcc \ - [--custom_system=no] - ``` - Use `--custom_system=no` if you are using a similar system to the [Nvidia submission systems for MLPerf inference 3.0](https://github.com/mlcommons/inference_results_v3.0/tree/main/closed/NVIDIA/systems). - -5. At the end of the build you'll get the following prompt unless you have chosen `--custom_system=no`. Please give a system name and say yes to generating the configuration files - - ### Example output - ``` - ============================================ - => A system ID is a string containing only letters, numbers, and underscores - => that is used as the human-readable name of the system. It is also used as - => the system name when creating the measurements/ and results/ entries. - => This string should also start with a letter to be a valid Python enum member name. - => Specify the system ID to use for the current system: phoenix - => Reloaded system list. MATCHED_SYSTEM: KnownSystem.phoenix - => This script will generate Benchmark Configuration stubs for the detected system. - Continue? [y/n]: y - ``` -
    - - -## Acknowledgments - -* A common CM interface and automation for MLPerf inference benchmarks was developed by Arjun Suresh and Grigori Fursin - sponsored by the [cTuning foundation](https://cTuning.org) and [cKnowledge.org](https://cKnowledge.org). -* Nvidia's MLPerf inference implementation was developed by Zhihan Jiang, Ethan Cheng, Yiheng Zhang and Jinho Suh. - - - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-nvidia)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *reproduce,mlcommons,mlperf,inference,harness,nvidia-harness,nvidia* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "reproduce mlcommons mlperf inference harness nvidia-harness nvidia" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=reproduce,mlcommons,mlperf,inference,harness,nvidia-harness,nvidia` - -`cm run script --tags=reproduce,mlcommons,mlperf,inference,harness,nvidia-harness,nvidia[,variations] [--input_flags]` - -*or* - -`cmr "reproduce mlcommons mlperf inference harness nvidia-harness nvidia"` - -`cmr "reproduce mlcommons mlperf inference harness nvidia-harness nvidia [variations]" [--input_flags]` - - -* *See the list 
of `variations` [here](#variations) and check the [Getting Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'reproduce,mlcommons,mlperf,inference,harness,nvidia-harness,nvidia', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -```
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="reproduce,mlcommons,mlperf,inference,harness,nvidia-harness,nvidia"``` - -#### Run this script via Docker (beta) - -`cm docker script "reproduce mlcommons mlperf inference harness nvidia-harness nvidia[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *Internal group (variations should not be selected manually)* -
    - Click here to expand this section. - - * `_3d-unet_` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_transformers - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.nibabel - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_pandas - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_bert_` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_transformers - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_safetensors - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_onnx - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_dlrm_` - - Workflow: - 1. 
***Read "deps" on other CM scripts*** - * get,generic-python-lib,_torch - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.torchsnapshot - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.torchrec - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.fbgemm-gpu - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_onnx-graphsurgeon - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.scikit-learn - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_gptj_` - - Environment variables: - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://cloud.mlcommons.org/index.php/s/QAZ2oM94MkFtbQx/download` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_package.datasets - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.simplejson - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - -
    - - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_a100,sxm,3d-unet_,offline,run_harness` - - Workflow: - * `_a100,sxm,bert_,offline,run_harness` - - Workflow: - * `_a100,sxm,dlrm_,offline,run_harness` - - Workflow: - * `_a100,sxm,resnet50,offline,run_harness` - - Environment variables: - - *CM_MLPERF_PERFORMANCE_SAMPLE_COUNT*: `2048` - - Workflow: - * `_a100,sxm,retinanet,offline,run_harness` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `2` - - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` - - *CM_MLPERF_NVIDIA_HARNESS_WORKSPACE_SIZE*: `300000000000` - - Workflow: - * `_a100,sxm,rnnt,offline,run_harness` - - Workflow: - * `_gptj_,build` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * install,pytorch,from.src,_for-nvidia-mlperf-inference-v3.1 - - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src) - * get,cmake - - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) - * `_gptj_,build_engine` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * install,pytorch,from.src,_for-nvidia-mlperf-inference-v3.1 - - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src) - * get,cmake - - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) - * `_gptj_,run_harness` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_USE_FP8*: `True` - - *CM_MLPERF_NVIDIA_HARNESS_ENABLE_SORT*: `True` - - *CM_MLPERF_NVIDIA_HARNESS_NUM_SORT_SEGMENTS*: `2` - - *CM_MLPERF_NVIDIA_HARNESS_SKIP_POSTPROCESS*: `True` - - Workflow: - 1. 
***Read "deps" on other CM scripts*** - * install,pytorch,from.src,_for-nvidia-mlperf-inference-v3.1 - - CM script: [install-pytorch-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-pytorch-from-src) - * get,cmake - - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) - * `_gpu_memory.16,3d-unet_,offline,run_harness` - - Workflow: - * `_gpu_memory.16,bert_,offline,run_harness` - - Workflow: - * `_gpu_memory.16,dlrm_,offline,run_harness` - - Workflow: - * `_gpu_memory.16,gptj_,offline,run_harness` - - Workflow: - * `_gpu_memory.16,resnet50,offline,run_harness` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `4` - - Workflow: - * `_gpu_memory.16,retinanet,offline,run_harness` - - Workflow: - * `_gpu_memory.16,rnnt,offline,run_harness` - - Workflow: - * `_gpu_memory.24,3d-unet_,offline,run_harness` - - Workflow: - * `_gpu_memory.24,bert_,offline,run_harness` - - Workflow: - * `_gpu_memory.24,dlrm_,offline,run_harness` - - Workflow: - * `_gpu_memory.24,gptj_,offline,run_harness` - - Workflow: - * `_gpu_memory.24,resnet50,offline,run_harness` - - Workflow: - * `_gpu_memory.24,retinanet,offline,run_harness` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `2` - - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` - - Workflow: - * `_gpu_memory.24,rnnt,offline,run_harness` - - Workflow: - * `_gpu_memory.32,3d-unet_,offline,run_harness` - - Workflow: - * `_gpu_memory.32,bert_,offline,run_harness` - - Workflow: - * `_gpu_memory.32,dlrm_,offline,run_harness` - - Workflow: - * `_gpu_memory.32,gptj_,offline,run_harness` - - Workflow: - * `_gpu_memory.32,resnet50,offline,run_harness` - - Workflow: - * `_gpu_memory.32,retinanet,offline,run_harness` - - Workflow: - * `_gpu_memory.32,rnnt,offline,run_harness` - - Workflow: - * `_gpu_memory.40,3d-unet_,offline,run_harness` - - Workflow: - * `_gpu_memory.40,bert_,offline,run_harness` - - Workflow: - * 
`_gpu_memory.40,dlrm_,offline,run_harness` - - Workflow: - * `_gpu_memory.40,gptj_,offline,run_harness` - - Workflow: - * `_gpu_memory.40,resnet50,offline,run_harness` - - Workflow: - * `_gpu_memory.40,retinanet,offline,run_harness` - - Workflow: - * `_gpu_memory.40,rnnt,offline,run_harness` - - Workflow: - * `_gpu_memory.48,3d-unet_,offline,run_harness` - - Workflow: - * `_gpu_memory.48,bert_,offline,run_harness` - - Workflow: - * `_gpu_memory.48,dlrm_,offline,run_harness` - - Workflow: - * `_gpu_memory.48,gptj_,offline,run_harness` - - Workflow: - * `_gpu_memory.48,resnet50,offline,run_harness` - - Workflow: - * `_gpu_memory.48,retinanet,offline,run_harness` - - Workflow: - * `_gpu_memory.48,rnnt,offline,run_harness` - - Workflow: - * `_gpu_memory.80,3d-unet_,offline,run_harness` - - Workflow: - * `_gpu_memory.80,bert_,server,run_harness` - - Workflow: - * `_gpu_memory.80,dlrm_,offline,run_harness` - - Workflow: - * `_gpu_memory.80,gptj_,offline,run_harness` - - Workflow: - * `_gpu_memory.80,resnet50,offline,run_harness` - - Workflow: - * `_gpu_memory.80,retinanet,offline,run_harness` - - Workflow: - * `_gpu_memory.80,rnnt,offline,run_harness` - - Workflow: - * `_l4,3d-unet_,offline,run_harness` - - Workflow: - * `_l4,bert_,offline,run_harness` - - Workflow: - * `_l4,bert_,server,run_harness` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_GRAPHS_MAX_SEQLEN*: `200` - - *CM_MLPERF_NVIDIA_HARNESS_SERVER_NUM_ISSUE_QUERY_THREADS*: `1` - - *CM_MLPERF_NVIDIA_HARNESS_SOFT_DROP*: `1.0` - - *CM_MLPERF_NVIDIA_HARNESS_USE_SMALL_TILE_GEMM_PLUGIN*: `True` - - Workflow: - * `_l4,dlrm_,offline,run_harness` - - Workflow: - * `_l4,resnet50` - - Workflow: - * `_l4,resnet50,offline,run_harness` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `2` - - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `1` - - *CM_MLPERF_NVIDIA_HARNESS_USE_GRAPHS*: `True` - - Workflow: - * `_l4,resnet50,server,run_harness` - - Environment variables: - - 
*CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `9` - - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` - - *CM_MLPERF_NVIDIA_HARNESS_USE_GRAPHS*: `True` - - *CM_MLPERF_NVIDIA_HARNESS_USE_DEQUE_LIMIT*: `True` - - *CM_MLPERF_NVIDIA_HARNESS_DEQUE_TIMEOUT_USEC*: `2000` - - *CM_MLPERF_NVIDIA_HARNESS_USE_CUDA_THREAD_PER_DEVICE*: `True` - - Workflow: - * `_l4,retinanet,offline,run_harness` - - Workflow: - * `_l4,retinanet,server,run_harness` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` - - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `2` - - *CM_MLPERF_NVIDIA_HARNESS_USE_DEQUE_LIMIT*: `True` - - *CM_MLPERF_NVIDIA_HARNESS_DEQUE_TIMEOUT_USEC*: `30000` - - *CM_MLPERF_NVIDIA_HARNESS_WORKSPACE_SIZE*: `20000000000` - - Workflow: - * `_l4,rnnt,offline,run_harness` - - Workflow: - * `_l4,rnnt,server,run_harness` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_AUDIO_BATCH_SIZE*: `64` - - *CM_MLPERF_NVIDIA_HARNESS_AUDIO_BUFFER_NUM_LINES*: `1024` - - *CM_MLPERF_NVIDIA_HARNESS_NUM_WARMUPS*: `1024` - - Workflow: - * `_multistream,resnet50` - - Environment variables: - - *SKIP_POLICIES*: `1` - - Workflow: - * `_orin,rnnt,singlestream,run_harness` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_NUM_WARMUPS*: `1` - - Workflow: - * `_resnet50,multistream,run_harness,num-gpus.1` - - Workflow: - * `_resnet50,multistream,run_harness,num-gpus.2` - - Workflow: - * `_resnet50,server,run_harness` - - Workflow: - * `_retinanet,multistream,run_harness` - - Workflow: - * `_retinanet,server,run_harness` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `2` - - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` - - Workflow: - * `_rtx_4090,3d-unet_,offline,run_harness` - - Workflow: - * `_rtx_4090,3d-unet_,server,run_harness` - - Workflow: - * `_rtx_4090,bert_,offline,run_harness` - - Workflow: - * `_rtx_4090,bert_,server,run_harness` - - Workflow: - * `_rtx_4090,dlrm_,offline,run_harness` - - Environment 
variables: - - *CM_MLPERF_NVIDIA_HARNESS_EMBEDDING_WEIGHTS_ON_GPU_PART*: `0.30` - - Workflow: - * `_rtx_4090,gptj_,offline,run_harness` - - Workflow: - * `_rtx_4090,gptj_,server,run_harness` - - Workflow: - * `_rtx_4090,resnet50,offline,run_harness` - - Workflow: - * `_rtx_4090,resnet50,server,run_harness` - - Workflow: - * `_rtx_4090,retinanet,offline,run_harness` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `2` - - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` - - Workflow: - * `_rtx_4090,retinanet,server,run_harness` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `2` - - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` - - Workflow: - * `_rtx_4090,rnnt,offline,run_harness` - - Workflow: - * `_rtx_4090,rnnt,server,run_harness` - - Workflow: - * `_rtx_6000_ada,3d-unet_,offline,run_harness` - - Workflow: - * `_rtx_6000_ada,3d-unet_,server,run_harness` - - Workflow: - * `_rtx_6000_ada,bert_,offline,run_harness` - - Workflow: - * `_rtx_6000_ada,bert_,server,run_harness` - - Workflow: - * `_rtx_6000_ada,dlrm_,offline,run_harness` - - Workflow: - * `_rtx_6000_ada,resnet50,offline,run_harness` - - Workflow: - * `_rtx_6000_ada,resnet50,server,run_harness` - - Workflow: - * `_rtx_6000_ada,retinanet,offline,run_harness` - - Workflow: - * `_rtx_6000_ada,retinanet,server,run_harness` - - Workflow: - * `_rtx_6000_ada,rnnt,offline,run_harness` - - Workflow: - * `_rtx_6000_ada,rnnt,server,run_harness` - - Workflow: - * `_rtx_a6000,3d-unet_,offline,run_harness` - - Workflow: - * `_rtx_a6000,3d-unet_,server,run_harness` - - Workflow: - * `_rtx_a6000,bert_,offline,run_harness` - - Workflow: - * `_rtx_a6000,bert_,server,run_harness` - - Workflow: - * `_rtx_a6000,dlrm_,offline,run_harness` - - Workflow: - * `_rtx_a6000,resnet50,offline,run_harness` - - Workflow: - * `_rtx_a6000,resnet50,server,run_harness` - - Workflow: - * `_rtx_a6000,retinanet,offline,run_harness` - - Workflow: - * 
`_rtx_a6000,retinanet,server,run_harness` - - Workflow: - * `_rtx_a6000,rnnt,offline,run_harness` - - Workflow: - * `_rtx_a6000,rnnt,server,run_harness` - - Workflow: - * `_run-harness` - - Workflow: - * `_singlestream,resnet50` - - Environment variables: - - *SKIP_POLICIES*: `1` - - Workflow: - * `_singlestream,run_harness` - - Workflow: - * `_t4,3d-unet_,offline,run_harness` - - Workflow: - * `_t4,bert_,offline,run_harness` - - Workflow: - * `_t4,bert_,server,run_harness` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_GRAPHS_MAX_SEQLEN*: `240` - - *CM_MLPERF_NVIDIA_HARNESS_SERVER_NUM_ISSUE_QUERY_THREADS*: `0` - - *CM_MLPERF_NVIDIA_HARNESS_USE_SMALL_TILE_GEMM_PLUGIN*: `no` - - Workflow: - * `_t4,dlrm_,offline,run_harness` - - Workflow: - * `_t4,resnet50` - - Workflow: - * `_t4,resnet50,offline,run_harness` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `4` - - Workflow: - * `_t4,resnet50,server,run_harness` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` - - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `4` - - *CM_MLPERF_NVIDIA_HARNESS_USE_DEQUE_LIMIT*: `True` - - *CM_MLPERF_NVIDIA_HARNESS_DEQUE_TIMEOUT_USEC*: `2000` - - *CM_MLPERF_NVIDIA_HARNESS_SOFT_DROP*: `0.993` - - Workflow: - * `_t4,retinanet,offline,run_harness` - - Workflow: - * `_t4,retinanet,server,run_harness` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS*: `2` - - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `2` - - *CM_MLPERF_NVIDIA_HARNESS_USE_DEQUE_LIMIT*: `True` - - *CM_MLPERF_NVIDIA_HARNESS_DEQUE_TIMEOUT_USEC*: `20000` - - *CM_MLPERF_NVIDIA_HARNESS_WORKSPACE_SIZE*: `20000000000` - - Workflow: - * `_t4,rnnt,offline,run_harness` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `4` - - *CM_MLPERF_NVIDIA_HARNESS_USE_GRAPHS*: `True` - - *CM_MLPERF_NVIDIA_HARNESS_AUDIO_BATCH_SIZE*: `128` - - *CM_MLPERF_NVIDIA_HARNESS_DISABLE_ENCODER_PLUGIN*: `True` - - Workflow: - * 
`_t4,rnnt,server,run_harness` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS*: `4` - - *CM_MLPERF_NVIDIA_HARNESS_USE_GRAPHS*: `True` - - *CM_MLPERF_NVIDIA_HARNESS_AUDIO_BATCH_SIZE*: `128` - - *CM_MLPERF_NVIDIA_HARNESS_DISABLE_ENCODER_PLUGIN*: `True` - - Workflow: - -
    - - - * Group "**backend**" -
    - Click here to expand this section. - - * **`_tensorrt`** (default) - - Environment variables: - - *CM_MLPERF_BACKEND*: `tensorrt` - - *CM_MLPERF_BACKEND_NAME*: `TensorRT` - - Workflow: - -
    - - - * Group "**batch-size**" -
    - Click here to expand this section. - - * `_batch_size.#` - - Environment variables: - - *CM_MODEL_BATCH_SIZE*: `#` - - *CM_MLPERF_NVIDIA_HARNESS_GPU_BATCH_SIZE*: `#` - - Workflow: - -
    - - - * Group "**build-engine-options**" -
    - Click here to expand this section. - - * `_build_engine_options.#` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_EXTRA_BUILD_ENGINE_OPTIONS*: `#` - - Workflow: - -
    - - - * Group "**device**" -
    - Click here to expand this section. - - * `_cpu` - - Environment variables: - - *CM_MLPERF_DEVICE*: `cpu` - - Workflow: - * **`_cuda`** (default) - - Environment variables: - - *CM_MLPERF_DEVICE*: `gpu` - - *CM_MLPERF_DEVICE_LIB_NAMESPEC*: `cudart` - - Workflow: - -
    - - - * Group "**device-memory**" -
    - Click here to expand this section. - - * `_gpu_memory.16` - - Environment variables: - - *CM_NVIDIA_GPU_MEMORY*: `16` - - Workflow: - * `_gpu_memory.24` - - Environment variables: - - *CM_NVIDIA_GPU_MEMORY*: `24` - - Workflow: - * `_gpu_memory.32` - - Environment variables: - - *CM_NVIDIA_GPU_MEMORY*: `32` - - Workflow: - * `_gpu_memory.40` - - Environment variables: - - *CM_NVIDIA_GPU_MEMORY*: `40` - - Workflow: - * `_gpu_memory.48` - - Environment variables: - - *CM_NVIDIA_GPU_MEMORY*: `48` - - Workflow: - * `_gpu_memory.8` - - Environment variables: - - *CM_NVIDIA_GPU_MEMORY*: `8` - - Workflow: - * `_gpu_memory.80` - - Environment variables: - - *CM_NVIDIA_GPU_MEMORY*: `80` - - Workflow: - -
    - - - * Group "**dla-batch-size**" -
    - Click here to expand this section. - - * `_dla_batch_size.#` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_DLA_BATCH_SIZE*: `#` - - *CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX2*: `dla_batch_size.#` - - Workflow: - -
    - - - * Group "**gpu-connection**" -
    - Click here to expand this section. - - * `_pcie` - - Workflow: - * `_sxm` - - Workflow: - -
    - - - * Group "**gpu-name**" -
    - Click here to expand this section. - - * `_a100` - - Environment variables: - - *CM_NVIDIA_CUSTOM_GPU*: `yes` - - Workflow: - * `_a6000` - - Environment variables: - - *CM_NVIDIA_CUSTOM_GPU*: `yes` - - Workflow: - * `_custom` - - Environment variables: - - *CM_NVIDIA_CUSTOM_GPU*: `yes` - - *CM_MODEL_BATCH_SIZE*: `` - - *CM_MLPERF_NVIDIA_HARNESS_GPU_BATCH_SIZE*: `<<>>` - - Workflow: - * `_l4` - - Environment variables: - - *CM_NVIDIA_CUSTOM_GPU*: `yes` - - Workflow: - * `_orin` - - Environment variables: - - *CM_NVIDIA_CUSTOM_GPU*: `yes` - - *CM_MODEL_BATCH_SIZE*: `` - - *CM_MLPERF_NVIDIA_HARNESS_GPU_BATCH_SIZE*: `<<>>` - - Workflow: - * `_rtx_4090` - - Environment variables: - - *CM_NVIDIA_CUSTOM_GPU*: `yes` - - Workflow: - * `_rtx_6000_ada` - - Environment variables: - - *CM_NVIDIA_CUSTOM_GPU*: `yes` - - Workflow: - * `_t4` - - Environment variables: - - *CM_NVIDIA_CUSTOM_GPU*: `yes` - - Workflow: - -
    - - - * Group "**loadgen-scenario**" -
    - Click here to expand this section. - - * `_multistream` - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `MultiStream` - - Workflow: - * `_offline` - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `Offline` - - Workflow: - * `_server` - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `Server` - - Workflow: - * `_singlestream` - - Environment variables: - - *CM_MLPERF_LOADGEN_SCENARIO*: `SingleStream` - - *CUDA_VISIBLE_DEVICES_NOT_USED*: `0` - - Workflow: - -
    - - - * Group "**model**" -
    - Click here to expand this section. - - * `_3d-unet-99` - - Environment variables: - - *CM_MODEL*: `3d-unet-99` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/5597155/files/3dunet_kits19_128x128x128.onnx` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` - - Workflow: - * `_3d-unet-99.9` - - Environment variables: - - *CM_MODEL*: `3d-unet-99.9` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/5597155/files/3dunet_kits19_128x128x128.onnx` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` - - Workflow: - * `_bert-99` - - Environment variables: - - *CM_MODEL*: `bert-99` - - *CM_NOT_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3750364/files/bert_large_v1_1_fake_quant.onnx` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int32` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` - - Workflow: - * `_bert-99.9` - - Environment variables: - - *CM_MODEL*: `bert-99.9` - - *CM_NOT_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3733910/files/model.onnx` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int32` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp16` - - Workflow: - * `_dlrm-v2-99` - - Environment variables: - - *CM_MODEL*: `dlrm-v2-99` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `affine fusion` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp16` - - Workflow: - * `_dlrm-v2-99.9` - - Environment variables: - - *CM_MODEL*: `dlrm-v2-99.9` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `affine fusion` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp32` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp16` - - Workflow: - * `_gptj-99` - - Environment 
variables: - - *CM_MODEL*: `gptj-99` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int32` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp16` - - Workflow: - * `_gptj-99.9` - - Environment variables: - - *CM_MODEL*: `gptj-99.9` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int32` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp16` - - Workflow: - * **`_resnet50`** (default) - - Environment variables: - - *CM_MODEL*: `resnet50` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_onnx-graphsurgeon - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.onnx - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_retinanet` - - Environment variables: - - *CM_MODEL*: `retinanet` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/6617981/files/resnext50_32x4d_fpn.pth` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `int8` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `int8` - - Workflow: - 1. 
***Read "deps" on other CM scripts*** - * get,generic-python-lib,_Pillow - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torch - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torchvision - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_opencv-python - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_numpy - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_pycocotools - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_onnx-graphsurgeon - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_package.onnx - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_rnnt` - - Environment variables: - - *CM_MODEL*: `rnnt` - - *CM_ML_MODEL_STARTING_WEIGHTS_FILENAME*: `https://zenodo.org/record/3662521/files/DistributedDataParallel_1576581068.9962234-epoch-100.pt` - - *CM_ML_MODEL_WEIGHT_TRANSFORMATIONS*: `quantization, affine fusion` - - *CM_ML_MODEL_INPUTS_DATA_TYPE*: `fp16` - - *CM_ML_MODEL_WEIGHTS_DATA_TYPE*: `fp16` - - Workflow: - 1. 
***Read "deps" on other CM scripts*** - * get,generic-python-lib,_toml - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torchvision - * CM names: `--adr.['torchvision']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torch - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_nvidia-apex - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_unidecode - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_inflect - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_librosa - * CM names: `--adr.['librosa']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_sox - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-sys-util,_sox - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - -
    - - - * Group "**num-gpus**" -
    - Click here to expand this section. - - * `_num-gpus.#` - - Environment variables: - - *CM_NVIDIA_NUM_GPUS*: `#` - - Workflow: - * **`_num-gpus.1`** (default) - - Environment variables: - - *CM_NVIDIA_NUM_GPUS*: `1` - - Workflow: - -
    - - - * Group "**power-mode**" -
    - Click here to expand this section. - - * `_maxn` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_MAXN*: `True` - - Workflow: - * `_maxq` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_MAXQ*: `True` - - Workflow: - -
    - - - * Group "**run-mode**" -
    - Click here to expand this section. - - * `_build` - - Environment variables: - - *MLPERF_NVIDIA_RUN_COMMAND*: `build` - - *CM_MLPERF_NVIDIA_HARNESS_RUN_MODE*: `build` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,cmake - - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) - * get,generic,sys-util,_glog-dev - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,generic,sys-util,_gflags-dev - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,generic,sys-util,_libgmock-dev - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,generic,sys-util,_libre2-dev - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,generic,sys-util,_libnuma-dev - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,generic,sys-util,_libboost-all-dev - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,generic,sys-util,_rapidjson-dev - - CM script: [get-generic-sys-util](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-sys-util) - * get,cuda,_cudnn - * CM names: `--adr.['cuda']...` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * get,tensorrt - * CM names: `--adr.['tensorrt']...` - - CM script: [get-tensorrt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tensorrt) - * build,nvidia,inference,server - * CM names: `--adr.['nvidia-inference-server']...` - - CM script: [build-mlperf-inference-server-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/build-mlperf-inference-server-nvidia) - * `_build_engine` - - Aliases: 
`_build-engine` - - Environment variables: - - *MLPERF_NVIDIA_RUN_COMMAND*: `generate_engines` - - *CM_MLPERF_NVIDIA_HARNESS_RUN_MODE*: `generate_engines` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,cuda,_cudnn - * CM names: `--adr.['cuda']...` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * get,tensorrt - * CM names: `--adr.['tensorrt']...` - - CM script: [get-tensorrt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tensorrt) - * build,nvidia,inference,server - * CM names: `--adr.['nvidia-inference-server']...` - - CM script: [build-mlperf-inference-server-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/build-mlperf-inference-server-nvidia) - * reproduce,mlperf,inference,nvidia,harness,_preprocess_data - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_MODEL': ['dlrm-v2-99', 'dlrm-v2-99.9']}` - - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) - * reproduce,mlperf,inference,nvidia,harness,_download_model - * Skip this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['retinanet_old', 'resnet50', 'bert-99', 'bert-99.9', 'dlrm-v2-99', 'dlrm-v2-99.9']}` - - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) - * reproduce,mlperf,inference,nvidia,harness,_calibrate - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['retinanet']}` - - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) - * `_calibrate` - - Environment variables: - - *MLPERF_NVIDIA_RUN_COMMAND*: `calibrate` - - *CM_MLPERF_NVIDIA_HARNESS_RUN_MODE*: `calibrate` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * reproduce,mlperf,inference,nvidia,harness,_download_model - * Skip this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['retinanet_old', 'resnet50', 'bert-99', 'bert-99.9', 'dlrm-v2-99', 'dlrm-v2-99.9']}` - - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) - * `_download_model` - - Environment variables: - - *MLPERF_NVIDIA_RUN_COMMAND*: `download_model` - - *CM_MLPERF_NVIDIA_HARNESS_RUN_MODE*: `download_model` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,generic-python-lib,_torch_cuda - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['retinanet']}` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * `_prebuild` - - Environment variables: - - *MLPERF_NVIDIA_RUN_COMMAND*: `prebuild` - - *CM_MLPERF_NVIDIA_HARNESS_RUN_MODE*: `prebuild` - - Workflow: - * `_preprocess_data` - - Environment variables: - - *MLPERF_NVIDIA_RUN_COMMAND*: `preprocess_data` - - *CM_MLPERF_NVIDIA_HARNESS_RUN_MODE*: `preprocess_data` - - Workflow: - * **`_run_harness`** (default) - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_RUN_MODE*: `run_harness` - - *MLPERF_NVIDIA_RUN_COMMAND*: `run_harness` - - *CM_CALL_MLPERF_RUNNER*: `yes` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * get,cuda,_cudnn - * CM names: `--adr.['cuda']...` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - * get,tensorrt - * CM names: `--adr.['tensorrt']...` - - CM script: [get-tensorrt](https://github.com/mlcommons/cm4mlops/tree/master/script/get-tensorrt) - * build,nvidia,inference,server - * CM names: `--adr.['nvidia-inference-server']...` - - CM script: [build-mlperf-inference-server-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/build-mlperf-inference-server-nvidia) - * reproduce,mlperf,inference,nvidia,harness,_build_engine - * CM names: `--adr.['build-engine']...` - - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) - * reproduce,mlperf,inference,nvidia,harness,_preprocess_data - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_MODEL': ['dlrm-v2-99', 'dlrm-v2-99.9']}` - - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) - * reproduce,mlperf,inference,nvidia,harness,_download_model - * Skip this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['retinanet', 'resnet50', 'bert-99', 'bert-99.9', 'dlrm-v2-99', 'dlrm-v2-99.9']}` - - CM script: [app-mlperf-inference-nvidia](https://github.com/mlcommons/cm4mlops/tree/master/script/app-mlperf-inference-nvidia) - -
    - - - * Group "**triton**" -
    - Click here to expand this section. - - * `_use_triton` - - Environment variables: - - *CM_MLPERF_NVIDIA_HARNESS_USE_TRITON*: `yes` - - *CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX3*: `using_triton` - - Workflow: - -
    - - -#### Default variations - -`_cuda,_num-gpus.1,_resnet50,_run_harness,_tensorrt` - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--audio_buffer_num_lines=value` → `CM_MLPERF_NVIDIA_HARNESS_AUDIO_BUFFER_NUM_LINES=value` -* `--count=value` → `CM_MLPERF_LOADGEN_QUERY_COUNT=value` -* `--deque_timeout_usec=value` → `CM_MLPERF_NVIDIA_HARNESS_DEQUE_TIMEOUT_USEC=value` -* `--devices=value` → `CM_MLPERF_NVIDIA_HARNESS_DEVICES=value` -* `--dla_batch_size=value` → `CM_MLPERF_NVIDIA_HARNESS_DLA_BATCH_SIZE=value` -* `--dla_copy_streams=value` → `CM_MLPERF_NVIDIA_HARNESS_DLA_COPY_STREAMS=value` -* `--dla_inference_streams=value` → `CM_MLPERF_NVIDIA_HARNESS_DLA_INFERENCE_STREAMS=value` -* `--embedding_weights_on_gpu_part=value` → `CM_MLPERF_NVIDIA_HARNESS_EMBEDDING_WEIGHTS_ON_GPU_PART=value` -* `--enable_sort=value` → `CM_MLPERF_NVIDIA_HARNESS_ENABLE_SORT=value` -* `--end_on_device=value` → `CM_MLPERF_NVIDIA_HARNESS_END_ON_DEVICE=value` -* `--extra_run_options=value` → `CM_MLPERF_NVIDIA_HARNESS_EXTRA_RUN_OPTIONS=value` -* `--gpu_batch_size=value` → `CM_MLPERF_NVIDIA_HARNESS_GPU_BATCH_SIZE=value` -* `--gpu_copy_streams=value` → `CM_MLPERF_NVIDIA_HARNESS_GPU_COPY_STREAMS=value` -* `--gpu_inference_streams=value` → `CM_MLPERF_NVIDIA_HARNESS_GPU_INFERENCE_STREAMS=value` -* `--graphs_max_seqlen=value` → `CM_MLPERF_NVIDIA_HARNESS_GRAPHS_MAX_SEQLEN=value` -* `--input_format=value` → `CM_MLPERF_NVIDIA_HARNESS_INPUT_FORMAT=value` -* `--log_dir=value` → `CM_MLPERF_NVIDIA_HARNESS_LOG_DIR=value` -* `--make_cmd=value` → `MLPERF_NVIDIA_RUN_COMMAND=value` -* `--max_batchsize=value` → `CM_MLPERF_LOADGEN_MAX_BATCHSIZE=value` -* `--max_dlas=value` → `CM_MLPERF_NVIDIA_HARNESS_MAX_DLAS=value` -* `--mlperf_conf=value` → `CM_MLPERF_CONF=value` -* `--mode=value` → `CM_MLPERF_LOADGEN_MODE=value` -* `--multistream_target_latency=value` → `CM_MLPERF_LOADGEN_MULTISTREAM_TARGET_LATENCY=value` -* `--num_issue_query_threads=value` → `CM_MLPERF_NVIDIA_HARNESS_NUM_ISSUE_QUERY_THREADS=value` -* `--num_sort_segments=value` → `CM_MLPERF_NVIDIA_HARNESS_NUM_SORT_SEGMENTS=value` -* 
`--num_warmups=value` → `CM_MLPERF_NVIDIA_HARNESS_NUM_WARMUPS=value` -* `--offline_target_qps=value` → `CM_MLPERF_LOADGEN_OFFLINE_TARGET_QPS=value` -* `--output_dir=value` → `CM_MLPERF_OUTPUT_DIR=value` -* `--performance_sample_count=value` → `CM_MLPERF_LOADGEN_PERFORMANCE_SAMPLE_COUNT=value` -* `--power_setting=value` → `CM_MLPERF_NVIDIA_HARNESS_POWER_SETTING=value` -* `--rerun=value` → `CM_RERUN=value` -* `--run_infer_on_copy_streams=value` → `CM_MLPERF_NVIDIA_HARNESS_RUN_INFER_ON_COPY_STREAMS=value` -* `--scenario=value` → `CM_MLPERF_LOADGEN_SCENARIO=value` -* `--server_target_qps=value` → `CM_MLPERF_LOADGEN_SERVER_TARGET_QPS=value` -* `--singlestream_target_latency=value` → `CM_MLPERF_LOADGEN_SINGLESTREAM_TARGET_LATENCY=value` -* `--skip_postprocess=value` → `CM_MLPERF_NVIDIA_HARNESS_SKIP_POSTPROCESS=value` -* `--skip_preprocess=value` → `CM_SKIP_PREPROCESS_DATASET=value` -* `--skip_preprocessing=value` → `CM_SKIP_PREPROCESS_DATASET=value` -* `--soft_drop=value` → `CM_MLPERF_NVIDIA_HARNESS_SOFT_DROP=value` -* `--start_from_device=value` → `CM_MLPERF_NVIDIA_HARNESS_START_FROM_DEVICE=value` -* `--target_latency=value` → `CM_MLPERF_LOADGEN_TARGET_LATENCY=value` -* `--target_qps=value` → `CM_MLPERF_LOADGEN_TARGET_QPS=value` -* `--use_cuda_thread_per_device=value` → `CM_MLPERF_NVIDIA_HARNESS_USE_CUDA_THREAD_PER_DEVICE=value` -* `--use_deque_limit=value` → `CM_MLPERF_NVIDIA_HARNESS_USE_DEQUE_LIMIT=value` -* `--use_fp8=value` → `CM_MLPERF_NVIDIA_HARNESS_USE_FP8=value` -* `--use_graphs=value` → `CM_MLPERF_NVIDIA_HARNESS_USE_GRAPHS=value` -* `--use_small_tile_gemm_plugin=value` → `CM_MLPERF_NVIDIA_HARNESS_USE_SMALL_TILE_GEMM_PLUGIN=value` -* `--use_triton=value` → `CM_MLPERF_NVIDIA_HARNESS_USE_TRITON=value` -* `--user_conf=value` → `CM_MLPERF_USER_CONF=value` -* `--workspace_size=value` → `CM_MLPERF_NVIDIA_HARNESS_WORKSPACE_SIZE=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... 
, "audio_buffer_num_lines":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_BATCH_COUNT: `1` -* CM_BATCH_SIZE: `1` -* CM_FAST_COMPILATION: `yes` -* CM_MLPERF_LOADGEN_SCENARIO: `Offline` -* CM_MLPERF_LOADGEN_MODE: `performance` -* CM_SKIP_PREPROCESS_DATASET: `no` -* CM_SKIP_MODEL_DOWNLOAD: `no` -* CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX: `nvidia_original` -* CM_MLPERF_SKIP_RUN: `no` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-nvidia/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,mlperf,inference,nvidia,scratch,space - * CM names: `--adr.['nvidia-scratch-space']...` - - CM script: [get-mlperf-inference-nvidia-scratch-space](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-scratch-space) - * get,generic-python-lib,_mlperf_logging - * CM names: `--adr.['mlperf-logging']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,dataset,original,imagenet,_full - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['resnet50']}` - * CM names: `--adr.['imagenet-original']...` - - CM script: [get-dataset-imagenet-val](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-imagenet-val) - * get,ml-model,resnet50,_fp32,_onnx,_opset-8 - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['resnet50']}` - * CM names: `--adr.['resnet50-model', 'ml-model']...` - - CM script: [get-ml-model-resnet50](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-resnet50) - * get,dataset,original,kits19 - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['3d-unet-99-disabled', '3d-unet-99.9-disabled']}` - * CM names: `--adr.['kits19-original']...` - - CM script: [get-dataset-kits19](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-kits19) - * get,dataset,original,librispeech - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['rnnt']}` - * CM names: `--adr.['librispeech-original']...` - - CM script: [get-dataset-librispeech](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-librispeech) - * get,dataset,preprocessed,criteo - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['dlrm-v2-99', 'dlrm-v2-99.9']}` - * Skip this dependency only if all ENV vars are set:
    -`{'DLRM_DATA_PATH': [True]}` - * CM names: `--adr.['criteo-preprocessed']...` - - CM script: [get-preprocessed-dataset-criteo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-preprocessed-dataset-criteo) - * get,ml-model,dlrm,_pytorch - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['dlrm-v2-99', 'dlrm-v2-99.9']}` - * Skip this dependency only if all ENV vars are set:
    -`{'DLRM_DATA_PATH': [True]}` - * CM names: `--adr.['dlrm-model']...` - - CM script: [get-ml-model-dlrm-terabyte](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-dlrm-terabyte) - * get,ml-model,bert,_onnx,_fp32 - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['bert-99', 'bert-99.9']}` - * CM names: `--adr.['bert-model', 'bert-model-fp32']...` - - CM script: [get-ml-model-bert-large-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-large-squad) - * get,ml-model,bert,_onnx,_int8 - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['bert-99', 'bert-99.9']}` - * CM names: `--adr.['bert-model', 'bert-model-int8']...` - - CM script: [get-ml-model-bert-large-squad](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-bert-large-squad) - * get,squad-vocab - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['bert-99', 'bert-99.9']}` - * CM names: `--adr.['bert-vocab']...` - - CM script: [get-dataset-squad-vocab](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-squad-vocab) - * get,dataset,original,openimages,_validation,_full,_custom-annotations - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['retinanet']}` - * CM names: `--adr.['openimages-original']...` - - CM script: [get-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages) - * get,dataset,original,openimages,_calibration - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['retinanet']}` - * CM names: `--adr.['openimages-calibration']...` - - CM script: [get-dataset-openimages](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openimages) - * get,dataset,original,openorca - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MODEL': ['gptj-99', 'gptj-99.9'], 'CM_MLPERF_NVIDIA_HARNESS_RUN_MODE': ['preprocess_dataset']}` - * CM names: `--adr.['openorca-original']...` - - CM script: [get-dataset-openorca](https://github.com/mlcommons/cm4mlops/tree/master/script/get-dataset-openorca) - * get,mlcommons,inference,src - * CM names: `--adr.['inference-src']...` - - CM script: [get-mlperf-inference-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-src) - * get,nvidia,mlperf,inference,common-code - * CM names: `--adr.['nvidia-inference-common-code']...` - - CM script: [get-mlperf-inference-nvidia-common-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-inference-nvidia-common-code) - * generate,user-conf,mlperf,inference - * Enable this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_NVIDIA_HARNESS_RUN_MODE': ['run_harness']}` - * CM names: `--adr.['user-conf-generator']...` - - CM script: [generate-mlperf-inference-user-conf](https://github.com/mlcommons/cm4mlops/tree/master/script/generate-mlperf-inference-user-conf) - * get,generic-python-lib,_package.nvmitten,_path./opt/nvmitten-0.1.3-cp38-cp38-linux_x86_64.whl - * Enable this dependency only if all ENV vars are set:
    -`{'CM_RUN_STATE_DOCKER': ['yes', True, 'True']}` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,nvidia,mitten - * Skip this dependency only if all ENV vars are set:
    -`{'CM_RUN_STATE_DOCKER': ['yes', True, 'True']}` - - CM script: [get-nvidia-mitten](https://github.com/mlcommons/cm4mlops/tree/master/script/get-nvidia-mitten) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-nvidia/customize.py)*** - 1. ***Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-nvidia/_cm.yaml)*** - * get,ml-model,gptj,_pytorch,_rclone - * Enable this dependency only if all ENV vars are set:
    -`{'CM_REQUIRE_GPTJ_MODEL_DOWNLOAD': ['yes'], 'CM_MLPERF_NVIDIA_HARNESS_RUN_MODE': ['download_model', 'preprocess_data']}` - * CM names: `--adr.['gptj-model']...` - - CM script: [get-ml-model-gptj](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-gptj) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-nvidia/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-nvidia/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-nvidia/customize.py)*** - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/app-mlperf-inference-nvidia/_cm.yaml)*** - * benchmark-mlperf - * Enable this dependency only if all ENV vars are set:
    -`{'CM_CALL_MLPERF_RUNNER': [True]}` - * Skip this dependency only if all ENV vars are set:
    -`{'CM_MLPERF_SKIP_RUN': ['yes', True]}` - * CM names: `--adr.['runner', 'mlperf-runner']...` - - CM script: [benchmark-program-mlperf](https://github.com/mlcommons/cm4mlops/tree/master/script/benchmark-program-mlperf) - * save,mlperf,inference,state - * CM names: `--adr.['save-mlperf-inference-state']...` - - CM script: [save-mlperf-inference-implementation-state](https://github.com/mlcommons/cm4mlops/tree/master/script/save-mlperf-inference-implementation-state) - -___ -### Script output -`cmr "reproduce mlcommons mlperf inference harness nvidia-harness nvidia [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Reproduce-MLPerf-benchmarks/index.md b/docs/Reproduce-MLPerf-benchmarks/index.md new file mode 100644 index 000000000..dc3eee810 --- /dev/null +++ b/docs/Reproduce-MLPerf-benchmarks/index.md @@ -0,0 +1,6 @@ +The Reproduce MLPerf benchmarks category contains the following scripts: + +- [app-mlperf-inference-nvidia](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/app-mlperf-inference-nvidia/README.md) +- [reproduce-mlperf-octoml-tinyml-results](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/reproduce-mlperf-octoml-tinyml-results/README.md) +- [reproduce-mlperf-training-nvidia](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/reproduce-mlperf-training-nvidia/README.md) +- [wrapper-reproduce-octoml-tinyml-submission](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/wrapper-reproduce-octoml-tinyml-submission/README.md) diff --git a/docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-octoml-tinyml-results/index.md b/docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-octoml-tinyml-results/index.md deleted file mode 100644 index 79baf27ea..000000000 --- a/docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-octoml-tinyml-results/index.md +++ /dev/null @@ -1,214 +0,0 @@ -Automatically 
generated README for this automation recipe: **reproduce-mlperf-octoml-tinyml-results** - -Category: **Reproduce MLPerf benchmarks** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=reproduce-mlperf-octoml-tinyml-results,a63803a707d04332) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-octoml-tinyml-results)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *reproduce,tiny,results,mlperf,octoml,mlcommons* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "reproduce tiny results mlperf octoml mlcommons" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=reproduce,tiny,results,mlperf,octoml,mlcommons` - -`cm run script --tags=reproduce,tiny,results,mlperf,octoml,mlcommons[,variations] [--input_flags]` - -*or* - -`cmr "reproduce tiny results mlperf octoml mlcommons"` - -`cmr "reproduce tiny results mlperf octoml mlcommons [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'reproduce,tiny,results,mlperf,octoml,mlcommons', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="reproduce,tiny,results,mlperf,octoml,mlcommons"``` - -#### Run this script via Docker (beta) - -`cm docker script "reproduce tiny results mlperf octoml mlcommons[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_NRF` - - Environment variables: - - *CM_TINY_BOARD*: `NRF5340DK` - - Workflow: - * `_NUCLEO` - - Environment variables: - - *CM_TINY_BOARD*: `NUCLEO_L4R5ZI` - - Workflow: - * `_ad` - - Environment variables: - - *CM_TINY_MODEL*: `ad` - - Workflow: - * `_cmsis_nn` - - Environment variables: - - *CM_MICROTVM_VARIANT*: `microtvm_cmsis_nn` - - Workflow: - * `_ic` - - Environment variables: - - *CM_TINY_MODEL*: `ic` - - Workflow: - * `_kws` - - Environment variables: - - *CM_TINY_MODEL*: `kws` - - Workflow: - * `_native` - - Environment variables: - - *CM_MICROTVM_VARIANT*: `microtvm_native` - - Workflow: - * `_vww` - - Environment variables: - - *CM_TINY_MODEL*: `vww` - - Workflow: - -
    - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--flash=value` → `CM_FLASH_BOARD=value` -* `--recreate_binary=value` → `CM_RECREATE_BINARY=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "flash":...}) -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `r1.0` - -* `r1.0` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-octoml-tinyml-results/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,python3 - * CM names: `--adr.['python3', 'python']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,zephyr - * CM names: `--adr.['zephyr']...` - - CM script: [get-zephyr](https://github.com/mlcommons/cm4mlops/tree/master/script/get-zephyr) - * get,zephyr-sdk - * CM names: `--adr.['zephyr-sdk']...` - - CM script: [get-zephyr-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-zephyr-sdk) - * get,cmsis - * CM names: `--adr.['cmsis']...` - - CM script: [get-cmsis_5](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmsis_5) - * get,microtvm - * CM names: `--adr.['microtvm']...` - - CM script: [get-microtvm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-microtvm) - * get,cmake - * CM names: `--adr.['cmake']...` - - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) - * get,gcc - - CM script: [get-gcc](https://github.com/mlcommons/cm4mlops/tree/master/script/get-gcc) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-octoml-tinyml-results/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-octoml-tinyml-results/_cm.json) - 1. 
***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-octoml-tinyml-results/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-octoml-tinyml-results/_cm.json) - 1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-octoml-tinyml-results/customize.py)*** - 1. ***Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-octoml-tinyml-results/_cm.json)*** - * flash,tiny,mlperf - * Enable this dependency only if all ENV vars are set:
    -`{'CM_FLASH_BOARD': ['True']}` - - CM script: [flash-tinyml-binary](https://github.com/mlcommons/cm4mlops/tree/master/script/flash-tinyml-binary) - -___ -### Script output -`cmr "reproduce tiny results mlperf octoml mlcommons [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_TINY_*` -#### New environment keys auto-detected from customize - -* `CM_TINY_MODEL` \ No newline at end of file diff --git a/docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-training-nvidia/index.md b/docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-training-nvidia/index.md deleted file mode 100644 index 86f08d6b8..000000000 --- a/docs/Reproduce-MLPerf-benchmarks/reproduce-mlperf-training-nvidia/index.md +++ /dev/null @@ -1,169 +0,0 @@ -Automatically generated README for this automation recipe: **reproduce-mlperf-training-nvidia** - -Category: **Reproduce MLPerf benchmarks** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=reproduce-mlperf-training-nvidia,f183628f292341e2) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *reproduce,mlcommons,mlperf,train,training,nvidia-training,nvidia* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "reproduce mlcommons mlperf train training nvidia-training nvidia" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=reproduce,mlcommons,mlperf,train,training,nvidia-training,nvidia` - -`cm run script --tags=reproduce,mlcommons,mlperf,train,training,nvidia-training,nvidia[,variations] [--input_flags]` - -*or* - -`cmr "reproduce mlcommons mlperf train training nvidia-training nvidia"` - -`cmr "reproduce mlcommons mlperf train training nvidia-training nvidia [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'reproduce,mlcommons,mlperf,train,training,nvidia-training,nvidia', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="reproduce,mlcommons,mlperf,train,training,nvidia-training,nvidia"``` - -#### Run this script via Docker (beta) - -`cm docker script "reproduce mlcommons mlperf train training nvidia-training nvidia[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * Group "**benchmark**" -
    - Click here to expand this section. - - * `_resnet` - - Environment variables: - - *CM_MLPERF_TRAINING_BENCHMARK*: `resnet` - - Workflow: - 1. ***Read "deps" on other CM scripts*** - * prepare,mlperf,training,resnet,_nvidia - * CM names: `--adr.['prepare-training-data', 'nvidia-training-data']...` - - CM script: [prepare-training-data-resnet](https://github.com/mlcommons/cm4mlops/tree/master/script/prepare-training-data-resnet) - * get,nvidia,training,code - * CM names: `--adr.['nvidia-training-code']...` - - CM script: [get-mlperf-training-nvidia-code](https://github.com/mlcommons/cm4mlops/tree/master/script/get-mlperf-training-nvidia-code) - -
    - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--results_dir=value` → `CM_MLPERF_RESULTS_DIR=value` -* `--system_conf_name=value` → `CM_MLPERF_NVIDIA_TRAINING_SYSTEM_CONF_NAME=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "results_dir":...}) -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -* `r2.1` -* `r3.0` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * detect,cpu - - CM script: [detect-cpu](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-cpu) - * get,nvidia-docker - - CM script: [get-nvidia-docker](https://github.com/mlcommons/cm4mlops/tree/master/script/get-nvidia-docker) - * get,cuda - * CM names: `--adr.['cuda']...` - - CM script: [get-cuda](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cuda) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia/_cm.yaml) - 1. ***Run native script if exists*** - * [run-resnet.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia/run-resnet.sh) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-mlperf-training-nvidia/_cm.yaml) - -___ -### Script output -`cmr "reproduce mlcommons mlperf train training nvidia-training nvidia [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Reproduce-MLPerf-benchmarks/wrapper-reproduce-octoml-tinyml-submission/index.md b/docs/Reproduce-MLPerf-benchmarks/wrapper-reproduce-octoml-tinyml-submission/index.md deleted file mode 100644 index 4854b325c..000000000 --- a/docs/Reproduce-MLPerf-benchmarks/wrapper-reproduce-octoml-tinyml-submission/index.md +++ /dev/null @@ -1,140 +0,0 @@ -Automatically generated README for this automation recipe: **wrapper-reproduce-octoml-tinyml-submission** - -Category: **Reproduce MLPerf benchmarks** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=wrapper-reproduce-octoml-tinyml-submission,b946001e289c4480) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/wrapper-reproduce-octoml-tinyml-submission)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *run,generate-tiny,generate,submission,tiny,generate-tiny-submission,results,mlcommons,mlperf,octoml* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "run generate-tiny generate submission tiny generate-tiny-submission results mlcommons mlperf octoml" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=run,generate-tiny,generate,submission,tiny,generate-tiny-submission,results,mlcommons,mlperf,octoml` - -`cm run script --tags=run,generate-tiny,generate,submission,tiny,generate-tiny-submission,results,mlcommons,mlperf,octoml [--input_flags]` - -*or* - -`cmr "run generate-tiny generate submission tiny generate-tiny-submission results mlcommons mlperf octoml"` - -`cmr "run generate-tiny generate submission tiny generate-tiny-submission results mlcommons mlperf octoml " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'run,generate-tiny,generate,submission,tiny,generate-tiny-submission,results,mlcommons,mlperf,octoml', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="run,generate-tiny,generate,submission,tiny,generate-tiny-submission,results,mlcommons,mlperf,octoml"``` - -#### Run this script via Docker (beta) - -`cm docker script "run generate-tiny generate submission tiny generate-tiny-submission results mlcommons mlperf octoml" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--flash=value` → `CM_FLASH_BOARD=value` -* `--recreate_binary=value` → `CM_RECREATE_BINARY=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "flash":...}) -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `r1.0` - -* `r1.0` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/wrapper-reproduce-octoml-tinyml-submission/_cm.json)*** - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/wrapper-reproduce-octoml-tinyml-submission/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/wrapper-reproduce-octoml-tinyml-submission/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/wrapper-reproduce-octoml-tinyml-submission/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/wrapper-reproduce-octoml-tinyml-submission/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/wrapper-reproduce-octoml-tinyml-submission/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/wrapper-reproduce-octoml-tinyml-submission/_cm.json) - -___ -### Script output -`cmr "run generate-tiny generate submission tiny generate-tiny-submission results mlcommons mlperf octoml " [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Reproducibility-and-artifact-evaluation/get-ipol-src/index.md b/docs/Reproducibility-and-artifact-evaluation/get-ipol-src/index.md deleted file mode 100644 index 755607bfb..000000000 --- a/docs/Reproducibility-and-artifact-evaluation/get-ipol-src/index.md +++ /dev/null @@ -1,146 +0,0 @@ -Automatically generated README for this automation recipe: **get-ipol-src** - -Category: **Reproducibility and artifact evaluation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ipol-src,b6fd8213d03d4aa4) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ipol-src)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,ipol,journal,src,ipol-src* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get ipol journal src ipol-src" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,ipol,journal,src,ipol-src` - -`cm run script --tags=get,ipol,journal,src,ipol-src [--input_flags]` - -*or* - -`cmr "get ipol journal src ipol-src"` - -`cmr "get ipol journal src ipol-src " [--input_flags]` - - - -#### Input Flags - -* --**number**=IPOL publication number -* --**year**=IPOL publication year - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "number":...} -``` -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'get,ipol,journal,src,ipol-src', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,ipol,journal,src,ipol-src"``` - -#### Run this script via Docker (beta) - -`cm docker script "get ipol journal src ipol-src" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--number=value` → `CM_IPOL_NUMBER=value` -* `--year=value` → `CM_IPOL_YEAR=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "number":...}) -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ipol-src/_cm.json) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ipol-src/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ipol-src/_cm.json) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ipol-src/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-ipol-src/_cm.json) - -___ -### Script output -`cmr "get ipol journal src ipol-src " [--input_flags] -j` -#### New environment keys (filter) - -* `CM_IPOL_*` -#### New environment keys auto-detected from customize - -* `CM_IPOL_PATH` \ No newline at end of file diff --git a/docs/Reproducibility-and-artifact-evaluation/index.md b/docs/Reproducibility-and-artifact-evaluation/index.md new file mode 100644 index 000000000..bea805cf7 --- /dev/null +++ b/docs/Reproducibility-and-artifact-evaluation/index.md @@ -0,0 +1,6 @@ +The Reproducibility and artifact evaluation category contains the following scripts: + +- [get-ipol-src](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-ipol-src/README.md) +- [process-ae-users](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/process-ae-users/README.md) +- [reproduce-ipol-paper-2022-439](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/reproduce-ipol-paper-2022-439/README.md) +- [reproduce-micro-paper-2023-victima](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/reproduce-micro-paper-2023-victima/README.md) diff --git 
a/docs/Reproducibility-and-artifact-evaluation/process-ae-users/index.md b/docs/Reproducibility-and-artifact-evaluation/process-ae-users/index.md deleted file mode 100644 index 38c4316ad..000000000 --- a/docs/Reproducibility-and-artifact-evaluation/process-ae-users/index.md +++ /dev/null @@ -1,136 +0,0 @@ -Automatically generated README for this automation recipe: **process-ae-users** - -Category: **Reproducibility and artifact evaluation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=process-ae-users,5800f1ed677e4efb) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-ae-users)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *process,ae,users* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "process ae users" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=process,ae,users` - -`cm run script --tags=process,ae,users [--input_flags]` - -*or* - -`cmr "process ae users"` - -`cmr "process ae users " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'process,ae,users', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="process,ae,users"``` - -#### Run this script via Docker (beta) - -`cm docker script "process ae users" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--file=value` → `CM_PROCESS_AE_USERS_INPUT_FILE=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "file":...}) -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-ae-users/_cm.json)*** - * get,python3 - * CM names: `--adr.['python3', 'python']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-ae-users/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-ae-users/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-ae-users/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-ae-users/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-ae-users/_cm.json) - 1. Run "postrocess" function from customize.py - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/process-ae-users/_cm.json) - -___ -### Script output -`cmr "process ae users " [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Reproducibility-and-artifact-evaluation/reproduce-ipol-paper-2022-439/index.md b/docs/Reproducibility-and-artifact-evaluation/reproduce-ipol-paper-2022-439/index.md deleted file mode 100644 index 5212a87b0..000000000 --- a/docs/Reproducibility-and-artifact-evaluation/reproduce-ipol-paper-2022-439/index.md +++ /dev/null @@ -1,148 +0,0 @@ -Automatically generated README for this automation recipe: **reproduce-ipol-paper-2022-439** - -Category: **Reproducibility and artifact evaluation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=reproduce-ipol-paper-2022-439,f9b9e5bd65e34e4f) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *app,python,reproduce,project,paper,ipol,journal,repro,reproducibility,pytorch,2022-439* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "app python reproduce project paper ipol journal repro reproducibility pytorch 2022-439" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=app,python,reproduce,project,paper,ipol,journal,repro,reproducibility,pytorch,2022-439` - -`cm run script --tags=app,python,reproduce,project,paper,ipol,journal,repro,reproducibility,pytorch,2022-439 [--input_flags]` - -*or* - -`cmr "app python reproduce project paper ipol journal repro reproducibility pytorch 2022-439"` - -`cmr "app python reproduce project paper ipol journal repro reproducibility pytorch 2022-439 " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'app,python,reproduce,project,paper,ipol,journal,repro,reproducibility,pytorch,2022-439', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="app,python,reproduce,project,paper,ipol,journal,repro,reproducibility,pytorch,2022-439"``` - -#### Run this script via Docker (beta) - -`cm docker script "app python reproduce project paper ipol journal repro reproducibility pytorch 2022-439" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--image1=value` → `CM_IMAGE_1=value` -* `--image2=value` → `CM_IMAGE_2=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "image1":...}) -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,ipol,src - * CM names: `--adr.['ipol-src']...` - - CM script: [get-ipol-src](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ipol-src) - * get,generic-python-lib,_torch - * CM names: `--adr.['torch']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - * get,generic-python-lib,_torchvision - * CM names: `--adr.['torchvision']...` - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439/_cm.yaml) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/reproduce-ipol-paper-2022-439/_cm.yaml) - -___ -### Script output -`cmr "app python reproduce project paper ipol journal repro reproducibility pytorch 2022-439 " [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Reproducibility-and-artifact-evaluation/reproduce-micro-paper-2023-victima/index.md b/docs/Reproducibility-and-artifact-evaluation/reproduce-micro-paper-2023-victima/index.md deleted file mode 100644 index 41b899e1e..000000000 --- a/docs/Reproducibility-and-artifact-evaluation/reproduce-micro-paper-2023-victima/index.md +++ /dev/null @@ -1,179 +0,0 @@ -
    -Click here to see the table of contents. - -* [About](#about) -* [Summary](#summary) -* [Reuse this script in your project](#reuse-this-script-in-your-project) - * [ Install CM automation language](#install-cm-automation-language) - * [ Check CM script flags](#check-cm-script-flags) - * [ Run this script from command line](#run-this-script-from-command-line) - * [ Run this script from Python](#run-this-script-from-python) - * [ Run this script via GUI](#run-this-script-via-gui) - * [ Run this script via Docker (beta)](#run-this-script-via-docker-(beta)) -* [Customization](#customization) - * [ Variations](#variations) - * [ Script flags mapped to environment](#script-flags-mapped-to-environment) - * [ Default environment](#default-environment) -* [Script workflow, dependencies and native scripts](#script-workflow-dependencies-and-native-scripts) -* [Script output](#script-output) -* [New environment keys (filter)](#new-environment-keys-(filter)) -* [New environment keys auto-detected from customize](#new-environment-keys-auto-detected-from-customize) -* [Maintainers](#maintainers) - -
    - -*Note that this README is automatically generated - don't edit!* - -### About - - -See extra [notes](README-extra.md) from the authors and contributors. - -#### Summary - -* Category: *Reproducibility and artifact evaluation.* -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/main/script/reproduce-micro-paper-2023-victima)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* CM "database" tags to find this script: *reproduce,project,paper,micro,micro-2023,victima* -* Output cached? *False* -___ -### Reuse this script in your project - -#### Install CM automation language - -* [Installation guide](https://github.com/mlcommons/ck/blob/master/docs/installation.md) -* [CM intro](https://doi.org/10.5281/zenodo.8105339) - -#### Pull CM repository with this automation - -```cm pull repo mlcommons@cm4mlops --checkout=dev``` - - -#### Run this script from command line - -1. `cm run script --tags=reproduce,project,paper,micro,micro-2023,victima[,variations] [--input_flags]` - -2. `cmr "reproduce project paper micro micro-2023 victima[ variations]" [--input_flags]` - -* `variations` can be seen [here](#variations) - -* `input_flags` can be seen [here](#script-flags-mapped-to-environment) - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'reproduce,project,paper,micro,micro-2023,victima', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="reproduce,project,paper,micro,micro-2023,victima"``` - -Use this [online GUI](https://cKnowledge.org/cm-gui/?tags=reproduce,project,paper,micro,micro-2023,victima) to generate CM CMD. - -#### Run this script via Docker (beta) - -`cm docker script "reproduce project paper micro micro-2023 victima[ variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_install_deps` - - Workflow: - * `_plot` - - Workflow: - * `_run` - - Workflow: - -
    - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--container=value` → `CM_VICTIMA_CONTAINER=value` -* `--job_manager=value` → `CM_VICTIMA_JOB_MANAGER=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "container":...}) -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_VICTIMA_JOB_MANAGER: `native` -* CM_VICTIMA_CONTAINER: `docker` - -
    - -___ -### Script workflow, dependencies and native scripts - -
    -Click here to expand this section. - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/main/script/reproduce-micro-paper-2023-victima/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/main/script/detect-os) - * get,python - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/main/script/get-python3) - * get,git,repo,_repo.https://github.com/CMU-SAFARI/Victima - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/main/script/get-git-repo) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/main/script/reproduce-micro-paper-2023-victima/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/main/script/reproduce-micro-paper-2023-victima/_cm.yaml) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/main/script/reproduce-micro-paper-2023-victima/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/main/script/reproduce-micro-paper-2023-victima/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/main/script/reproduce-micro-paper-2023-victima/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/main/script/reproduce-micro-paper-2023-victima/_cm.yaml) -
    - -___ -### Script output -`cmr "reproduce project paper micro micro-2023 victima[,variations]" [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize - -___ -### Maintainers - -* [Open MLCommons taskforce on automation and reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) \ No newline at end of file diff --git a/docs/Tests/index.md b/docs/Tests/index.md new file mode 100644 index 000000000..b21c061cd --- /dev/null +++ b/docs/Tests/index.md @@ -0,0 +1,17 @@ +The Tests category contains the following scripts: + +- [print-any-text](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/print-any-text/README.md) +- [print-croissant-desc](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/print-croissant-desc/README.md) +- [print-hello-world](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/print-hello-world/README.md) +- [print-hello-world-java](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/print-hello-world-java/README.md) +- [print-hello-world-javac](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/print-hello-world-javac/README.md) +- [print-hello-world-py](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/print-hello-world-py/README.md) +- [print-python-version](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/print-python-version/README.md) +- [run-python](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/run-python/README.md) +- [test-cm-core](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/test-cm-core/README.md) +- [test-cm-script-pipeline](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/test-cm-script-pipeline/README.md) +- [test-deps-conditions](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/test-deps-conditions/README.md) +- 
[test-deps-conditions2](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/test-deps-conditions2/README.md) +- [test-download-and-extract-artifacts](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/test-download-and-extract-artifacts/README.md) +- [test-set-sys-user-cm](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/test-set-sys-user-cm/README.md) +- [upgrade-python-pip](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/upgrade-python-pip/README.md) diff --git a/docs/Tests/print-croissant-desc/index.md b/docs/Tests/print-croissant-desc/index.md deleted file mode 100644 index fafb36774..000000000 --- a/docs/Tests/print-croissant-desc/index.md +++ /dev/null @@ -1,144 +0,0 @@ -Automatically generated README for this automation recipe: **print-croissant-desc** - -Category: **Tests** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=print-croissant-desc,59116d5c98a04d4f) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-croissant-desc)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *print,croissant,desc* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "print croissant desc" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=print,croissant,desc` - -`cm run script --tags=print,croissant,desc [--input_flags]` - -*or* - -`cmr "print croissant desc"` - -`cmr "print croissant desc " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'print,croissant,desc' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="print,croissant,desc"``` - -#### Run this script via Docker (beta) - -`cm docker script "print croissant desc" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--url=value` → `CM_PRINT_CROISSANT_URL=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "url":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_PRINT_CROISSANT_URL: `https://raw.githubusercontent.com/mlcommons/croissant/main/datasets/1.0/gpt-3/metadata.json` - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-croissant-desc/_cm.yaml)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,croissant - * CM names: `--adr.['croissant']...` - - CM script: [get-croissant](https://github.com/mlcommons/cm4mlops/tree/master/script/get-croissant) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-croissant-desc/_cm.yaml) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-croissant-desc/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-croissant-desc/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-croissant-desc/_cm.yaml) - 1. Run "postrocess" function from customize.py - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-croissant-desc/_cm.yaml) - -___ -### Script output -`cmr "print croissant desc " [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Tests/print-hello-world-java/index.md b/docs/Tests/print-hello-world-java/index.md deleted file mode 100644 index 2b51ce3f0..000000000 --- a/docs/Tests/print-hello-world-java/index.md +++ /dev/null @@ -1,123 +0,0 @@ -Automatically generated README for this automation recipe: **print-hello-world-java** - -Category: **Tests** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=print-hello-world-java,3b62dc46cce3489c) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-java)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *print,hello world,hello-world,hello,world,java* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "print hello world hello-world hello world java" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=print,hello world,hello-world,hello,world,java` - -`cm run script --tags=print,hello world,hello-world,hello,world,java ` - -*or* - -`cmr "print hello world hello-world hello world java"` - -`cmr "print hello world hello-world hello world java " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'print,hello world,hello-world,hello,world,java' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="print,hello world,hello-world,hello,world,java"``` - -#### Run this script via Docker (beta) - -`cm docker script "print hello world hello-world hello world java" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-java/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,java - * CM names: `--adr.['java']...` - - CM script: [get-java](https://github.com/mlcommons/cm4mlops/tree/master/script/get-java) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-java/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-java/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-java/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-java/_cm.json) - 1. Run "postrocess" function from customize.py - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-java/_cm.json) - -___ -### Script output -`cmr "print hello world hello-world hello world java " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Tests/print-hello-world-javac/index.md b/docs/Tests/print-hello-world-javac/index.md deleted file mode 100644 index 3e1db5b11..000000000 --- a/docs/Tests/print-hello-world-javac/index.md +++ /dev/null @@ -1,123 +0,0 @@ -Automatically generated README for this automation recipe: **print-hello-world-javac** - -Category: **Tests** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=print-hello-world-javac,040fafd538104819) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-javac)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *print,hello world,hello-world,hello,world,javac* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "print hello world hello-world hello world javac" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=print,hello world,hello-world,hello,world,javac` - -`cm run script --tags=print,hello world,hello-world,hello,world,javac ` - -*or* - -`cmr "print hello world hello-world hello world javac"` - -`cmr "print hello world hello-world hello world javac " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'print,hello world,hello-world,hello,world,javac' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="print,hello world,hello-world,hello,world,javac"``` - -#### Run this script via Docker (beta) - -`cm docker script "print hello world hello-world hello world javac" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-javac/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,javac - * CM names: `--adr.['javac']...` - - CM script: [get-javac](https://github.com/mlcommons/cm4mlops/tree/master/script/get-javac) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-javac/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-javac/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-javac/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-javac/_cm.json) - 1. Run "postrocess" function from customize.py - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-javac/_cm.json) - -___ -### Script output -`cmr "print hello world hello-world hello world javac " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Tests/print-hello-world-py/index.md b/docs/Tests/print-hello-world-py/index.md deleted file mode 100644 index ddfa31d1d..000000000 --- a/docs/Tests/print-hello-world-py/index.md +++ /dev/null @@ -1,129 +0,0 @@ -Automatically generated README for this automation recipe: **print-hello-world-py** - -Category: **Tests** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=print-hello-world-py,d83274c7eb754d90) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-py)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *print,hello world,hello-world,hello,world,python* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "print hello world hello-world hello world python" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=print,hello world,hello-world,hello,world,python` - -`cm run script --tags=print,hello world,hello-world,hello,world,python ` - -*or* - -`cmr "print hello world hello-world hello world python"` - -`cmr "print hello world hello-world hello world python " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'print,hello world,hello-world,hello,world,python' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="print,hello world,hello-world,hello,world,python"``` - -#### Run this script via Docker (beta) - -`cm docker script "print hello world hello-world hello world python" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-py/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,sys-utils-cm - - CM script: [get-sys-utils-cm](https://github.com/mlcommons/cm4mlops/tree/master/script/get-sys-utils-cm) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * print,python-version - * Skip this dependenecy only if all ENV vars are set:
    -`{'CM_SKIP_PRINT': ['True'], 'CM_SKIP_PRINT2': ['True']}` - - CM script: [print-python-version](https://github.com/mlcommons/cm4mlops/tree/master/script/print-python-version) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-py/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-py/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-py/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-py/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world-py/_cm.json) - -___ -### Script output -`cmr "print hello world hello-world hello world python " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Tests/print-hello-world/index.md b/docs/Tests/print-hello-world/index.md deleted file mode 100644 index 1505464e7..000000000 --- a/docs/Tests/print-hello-world/index.md +++ /dev/null @@ -1,155 +0,0 @@ -Automatically generated README for this automation recipe: **print-hello-world** - -Category: **Tests** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=print-hello-world,b9f0acba4aca4baa) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *print,hello-world,hello world,hello,world,native-script,native,script* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "print hello-world hello world hello world native-script native script" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=print,hello-world,hello world,hello,world,native-script,native,script` - -`cm run script --tags=print,hello-world,hello world,hello,world,native-script,native,script[,variations] [--input_flags]` - -*or* - -`cmr "print hello-world hello world hello world native-script native script"` - -`cmr "print hello-world hello world hello world native-script native script [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'print,hello-world,hello world,hello,world,native-script,native,script' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="print,hello-world,hello world,hello,world,native-script,native,script"``` - -#### Run this script via Docker (beta) - -`cm docker script "print hello-world hello world hello world native-script native script[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_skip_print_env` - - Environment variables: - - *CM_PRINT_HELLO_WORLD_SKIP_PRINT_ENV*: `yes` - - Workflow: - * `_text.#` - - Environment variables: - - *CM_PRINT_HELLO_WORLD_TEXT*: `#` - - Workflow: - -
    - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--test1=value` → `CM_ENV_TEST1=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "test1":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_ENV_TEST1: `TEST1` - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world/_cm.yaml) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world/_cm.yaml) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world/_cm.yaml) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-hello-world/_cm.yaml) - -___ -### Script output -`cmr "print hello-world hello world hello world native-script native script [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_ENV_TEST*` -#### New environment keys auto-detected from customize diff --git a/docs/Tests/print-python-version/index.md b/docs/Tests/print-python-version/index.md deleted file mode 100644 index 09db44012..000000000 --- a/docs/Tests/print-python-version/index.md +++ /dev/null @@ -1,121 +0,0 @@ -Automatically generated README for this automation recipe: **print-python-version** - -Category: **Tests** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=print-python-version,d3a538fa4abb464b) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: 
*[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-python-version)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *print,python,version,python-version* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "print python version python-version" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=print,python,version,python-version` - -`cm run script --tags=print,python,version,python-version ` - -*or* - -`cmr "print python version python-version"` - -`cmr "print python version python-version " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'print,python,version,python-version' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="print,python,version,python-version"``` - -#### Run this script via Docker (beta) - -`cm docker script "print python version python-version" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-python-version/_cm.json)*** - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-python-version/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-python-version/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-python-version/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-python-version/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/print-python-version/_cm.json) - -___ -### Script output -`cmr "print python version python-version " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Tests/run-python/index.md b/docs/Tests/run-python/index.md deleted file mode 100644 index 7ab1b2aec..000000000 --- a/docs/Tests/run-python/index.md +++ /dev/null @@ -1,138 +0,0 @@ -Automatically generated README for this automation recipe: **run-python** - -Category: **Tests** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=run-python,75a46d84ee6f49b0) ]* - ---- -#### Summary - -* CM GitHub repository: 
*[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-python)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *run,python* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "run python" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=run,python` - -`cm run script --tags=run,python [--input_flags]` - -*or* - -`cmr "run python"` - -`cmr "run python " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'run,python' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="run,python"``` - -#### Run this script via Docker (beta) - -`cm docker script "run python" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--command=value` → `CM_RUN_PYTHON_CMD=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "command":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-python/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-python/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-python/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-python/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-python/_cm.json) - 1. Run "postrocess" function from customize.py - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/run-python/_cm.json) - -___ -### Script output -`cmr "run python " [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Tests/test-deps-conditions/index.md b/docs/Tests/test-deps-conditions/index.md deleted file mode 100644 index 4c0ee33ed..000000000 --- a/docs/Tests/test-deps-conditions/index.md +++ /dev/null @@ -1,151 +0,0 @@ -Automatically generated README for this automation recipe: **test-deps-conditions** - -Category: **Tests** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=test-deps-conditions,5cb82aee472640df) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-deps-conditions)* -* CM meta description for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *test,deps,conditions* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "test deps conditions" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=test,deps,conditions` - -`cm run script --tags=test,deps,conditions [--input_flags]` - -*or* - -`cmr "test deps conditions"` - -`cmr "test deps conditions " [--input_flags]` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'test,deps,conditions' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="test,deps,conditions"``` - -#### Run this script via Docker (beta) - -`cm docker script "test deps conditions" [--input_flags]` - -___ -### Customization - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--test1=value` → `CM_ENV1=value` -* `--test2=value` → `CM_ENV2=value` -* `--test3=value` → `CM_ENV3=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "test1":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-deps-conditions/_cm.yaml)*** - * print,native,hello-world,_skip_print_env - - CM script: [print-hello-world](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world) - * print,native,hello-world,_skip_print_env,_text.SKIP_IF_ALL_ENV - * Skip this dependency only if all ENV vars are set:
    -`{'CM_ENV1': [True], 'CM_ENV2': [True], 'CM_ENV3': [True]}` - - CM script: [print-hello-world](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world) - * print,native,hello-world,_skip_print_env,_text.SKIP_IF_ANY_ENV - * Skip this dependency only if any of ENV vars are set:
    -`{'CM_ENV1': [True], 'CM_ENV2': [True], 'CM_ENV3': [True]}` - - CM script: [print-hello-world](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world) - * print,native,hello-world,_skip_print_env,_text.ENABLE_IF_ALL_ENV - * Enable this dependency only if all ENV vars are set:
    -`{'CM_ENV1': [True], 'CM_ENV2': [True], 'CM_ENV3': [True]}` - - CM script: [print-hello-world](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world) - * print,native,hello-world,_skip_print_env,_text.ENABLE_IF_ANY_ENV - * Enable this dependency only if any of ENV vars are set:
    -`{'CM_ENV1': [True], 'CM_ENV2': [True], 'CM_ENV3': [True]}` - - CM script: [print-hello-world](https://github.com/mlcommons/cm4mlops/tree/master/script/print-hello-world) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-deps-conditions/_cm.yaml) - 1. ***Run native script if exists*** - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-deps-conditions/_cm.yaml) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-deps-conditions/_cm.yaml) - -___ -### Script output -`cmr "test deps conditions " [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Tests/test-download-and-extract-artifacts/index.md b/docs/Tests/test-download-and-extract-artifacts/index.md deleted file mode 100644 index c0b6cf1de..000000000 --- a/docs/Tests/test-download-and-extract-artifacts/index.md +++ /dev/null @@ -1,123 +0,0 @@ -Automatically generated README for this automation recipe: **test-download-and-extract-artifacts** - -Category: **Tests** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=test-download-and-extract-artifacts,51dde7580b404b27) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts)* -* CM meta description 
for this script: *[_cm.yaml](_cm.yaml)* -* All CM tags to find and reuse this script (see in above meta description): *test,download-and-extract-artifacts* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "test download-and-extract-artifacts" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=test,download-and-extract-artifacts` - -`cm run script --tags=test,download-and-extract-artifacts ` - -*or* - -`cmr "test download-and-extract-artifacts"` - -`cmr "test download-and-extract-artifacts " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'test,download-and-extract-artifacts', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="test,download-and-extract-artifacts"``` - -#### Run this script via Docker (beta) - -`cm docker script "test download-and-extract-artifacts" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts/_cm.yaml)*** - * download,file,_url.https://zenodo.org/record/4735647/files/resnet50_v1.onnx - - CM script: [download-file](https://github.com/mlcommons/cm4mlops/tree/master/script/download-file) - * download-and-extract,_extract,_url.https://zenodo.org/record/5597155/files/3dunet_kits19_128x128x128.tf.zip?download=1 - - CM script: [download-and-extract](https://github.com/mlcommons/cm4mlops/tree/master/script/download-and-extract) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts/_cm.yaml) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts/_cm.yaml) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-download-and-extract-artifacts/_cm.yaml) - -___ -### Script output -`cmr "test download-and-extract-artifacts " -j` -#### New environment keys (filter) - -* `CM_REPRODUCE_PAPER_XYZ*` -#### New environment keys auto-detected from customize diff --git a/docs/Tests/test-set-sys-user-cm/index.md b/docs/Tests/test-set-sys-user-cm/index.md deleted file mode 100644 index 5edef2acb..000000000 --- a/docs/Tests/test-set-sys-user-cm/index.md +++ /dev/null @@ -1,118 +0,0 @@ -Automatically generated README for this automation recipe: **test-set-sys-user-cm** - -Category: **Tests** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=test-set-sys-user-cm,25fdfcf0fe434af2) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-set-sys-user-cm)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *demo,set,sys-user,cm,sys-user-cm* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "demo set sys-user cm sys-user-cm" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=demo,set,sys-user,cm,sys-user-cm` - -`cm run script --tags=demo,set,sys-user,cm,sys-user-cm ` - -*or* - -`cmr "demo set sys-user cm sys-user-cm"` - -`cmr "demo set sys-user cm sys-user-cm " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'demo,set,sys-user,cm,sys-user-cm', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="demo,set,sys-user,cm,sys-user-cm"``` - -#### Run this script via Docker (beta) - -`cm docker script "demo set sys-user cm sys-user-cm" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - -* CM_SUDO: `sudo` - -
    - -___ -### Dependencies on other CM scripts - - - 1. Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-set-sys-user-cm/_cm.json) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-set-sys-user-cm/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-set-sys-user-cm/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-set-sys-user-cm/_cm.json) - 1. Run "postrocess" function from customize.py - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/test-set-sys-user-cm/_cm.json) - -___ -### Script output -`cmr "demo set sys-user cm sys-user-cm " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/Tests/upgrade-python-pip/index.md b/docs/Tests/upgrade-python-pip/index.md deleted file mode 100644 index cacd17ff8..000000000 --- a/docs/Tests/upgrade-python-pip/index.md +++ /dev/null @@ -1,123 +0,0 @@ -Automatically generated README for this automation recipe: **upgrade-python-pip** - -Category: **Tests** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=upgrade-python-pip,4343ed2d9a974923) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/upgrade-python-pip)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and 
reuse this script (see in above meta description): *upgrade,python,pip,python-pip* -* Output cached? *False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "upgrade python pip python-pip" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=upgrade,python,pip,python-pip` - -`cm run script --tags=upgrade,python,pip,python-pip ` - -*or* - -`cmr "upgrade python pip python-pip"` - -`cmr "upgrade python pip python-pip " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'upgrade,python,pip,python-pip', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="upgrade,python,pip,python-pip"``` - -#### Run this script via Docker (beta) - -`cm docker script "upgrade python pip python-pip" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/upgrade-python-pip/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,python3 - * CM names: `--adr.['python', 'python3']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - 1. Run "preprocess" function from customize.py - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/upgrade-python-pip/_cm.json) - 1. ***Run native script if exists*** - * [run.bat](https://github.com/mlcommons/cm4mlops/tree/dev/script/upgrade-python-pip/run.bat) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/upgrade-python-pip/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/upgrade-python-pip/_cm.json) - 1. Run "postrocess" function from customize.py - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/upgrade-python-pip/_cm.json) - -___ -### Script output -`cmr "upgrade python pip python-pip " -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/TinyML-automation/create-fpgaconvnet-app-tinyml/index.md b/docs/TinyML-automation/create-fpgaconvnet-app-tinyml/index.md deleted file mode 100644 index c1644ada5..000000000 --- a/docs/TinyML-automation/create-fpgaconvnet-app-tinyml/index.md +++ /dev/null @@ -1,156 +0,0 @@ -Automatically generated README for this automation recipe: **create-fpgaconvnet-app-tinyml** - -Category: **TinyML automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=create-fpgaconvnet-app-tinyml,618f3520e98e4728) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-app-tinyml)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *create,app,fpgaconvnet* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "create app fpgaconvnet" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=create,app,fpgaconvnet` - -`cm run script --tags=create,app,fpgaconvnet[,variations] ` - -*or* - -`cmr "create app fpgaconvnet"` - -`cmr "create app fpgaconvnet [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'create,app,fpgaconvnet', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="create,app,fpgaconvnet"``` - -#### Run this script via Docker (beta) - -`cm docker script "create app fpgaconvnet[variations]" ` - -___ -### Customization - - -#### Variations - - * Group "**benchmark**" -
    - Click here to expand this section. - - * **`_ic`** (default) - - Workflow: - -
    - - - * Group "**board**" -
    - Click here to expand this section. - - * **`_zc706`** (default) - - Environment variables: - - *CM_TINY_BOARD*: `zc706` - - Workflow: - -
    - - -#### Default variations - -`_ic,_zc706` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-app-tinyml/_cm.json)*** - * create,fpgaconvnet,config - * CM names: `--adr.['config-generator']...` - - CM script: [create-fpgaconvnet-config-tinyml](https://github.com/mlcommons/cm4mlops/tree/master/script/create-fpgaconvnet-config-tinyml) - * get,xilinx,sdk - * CM names: `--adr.['xilinx-sdk']...` - - CM script: [get-xilinx-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-xilinx-sdk) - * get,tensorflow - * CM names: `--adr.['tensorflow']...` - - CM script: [install-tensorflow-from-src](https://github.com/mlcommons/cm4mlops/tree/master/script/install-tensorflow-from-src) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-app-tinyml/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-app-tinyml/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-app-tinyml/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-app-tinyml/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-app-tinyml/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-app-tinyml/_cm.json) - -___ -### Script output -`cmr "create app fpgaconvnet [,variations]" -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/TinyML-automation/create-fpgaconvnet-config-tinyml/index.md b/docs/TinyML-automation/create-fpgaconvnet-config-tinyml/index.md deleted file mode 100644 index 4f6b5eb62..000000000 --- a/docs/TinyML-automation/create-fpgaconvnet-config-tinyml/index.md +++ /dev/null @@ -1,173 +0,0 @@ -Automatically generated README for this automation recipe: **create-fpgaconvnet-config-tinyml** - -Category: **TinyML automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=create-fpgaconvnet-config-tinyml,f6cdad166cfa47bc) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-config-tinyml)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *create,config,fpgaconvnet* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "create config fpgaconvnet" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=create,config,fpgaconvnet` - -`cm run script --tags=create,config,fpgaconvnet[,variations] ` - -*or* - -`cmr "create config fpgaconvnet"` - -`cmr "create config fpgaconvnet [variations]" ` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'create,config,fpgaconvnet', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="create,config,fpgaconvnet"``` - -#### Run this script via Docker (beta) - -`cm docker script "create config fpgaconvnet[variations]" ` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_zc706,ic` - - Environment variables: - - *CM_TINY_NETWORK_NAME*: `zc706-resnet` - - Workflow: - -
    - - - * Group "**benchmark**" -
    - Click here to expand this section. - - * **`_ic`** (default) - - Workflow: - -
    - - - * Group "**board**" -
    - Click here to expand this section. - - * **`_zc706`** (default) - - Environment variables: - - *CM_TINY_BOARD*: `zc706` - - Workflow: - -
    - - -#### Default variations - -`_ic,_zc706` -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-config-tinyml/_cm.json)*** - * get,python3 - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,ml-model,tiny - * CM names: `--adr.['ml-model']...` - - CM script: [get-ml-model-tiny-resnet](https://github.com/mlcommons/cm4mlops/tree/master/script/get-ml-model-tiny-resnet) - * get,git,repo,_repo.https://github.com/mlcommons/submissions_tiny_v1.1 - - CM script: [get-git-repo](https://github.com/mlcommons/cm4mlops/tree/master/script/get-git-repo) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-config-tinyml/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-config-tinyml/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-config-tinyml/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-config-tinyml/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-config-tinyml/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/create-fpgaconvnet-config-tinyml/_cm.json) - -___ -### Script output -`cmr "create config fpgaconvnet [,variations]" -j` -#### New environment keys (filter) - -* `CM_TINY_FPGACONVNET*` -#### New environment keys auto-detected from customize - -* `CM_TINY_FPGACONVNET_' + network_env_name + '_CODE_PATH` -* `CM_TINY_FPGACONVNET_' + network_env_name + '_RUN_DIR` -* `CM_TINY_FPGACONVNET_CONFIG_FILE_' + network_env_name + '_PATH` -* `CM_TINY_FPGACONVNET_NETWORK_ENV_NAME` -* `CM_TINY_FPGACONVNET_NETWORK_NAME` \ No newline at end of file diff --git a/docs/TinyML-automation/flash-tinyml-binary/index.md b/docs/TinyML-automation/flash-tinyml-binary/index.md deleted file mode 100644 index 8d41da6b9..000000000 --- a/docs/TinyML-automation/flash-tinyml-binary/index.md +++ /dev/null @@ -1,175 +0,0 @@ -Automatically generated README for this automation recipe: **flash-tinyml-binary** - -Category: **TinyML automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=flash-tinyml-binary,98913babb43f4fcb) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/flash-tinyml-binary)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *flash,tiny,mlperf,mlcommons* -* Output cached? 
*False* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "flash tiny mlperf mlcommons" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=flash,tiny,mlperf,mlcommons` - -`cm run script --tags=flash,tiny,mlperf,mlcommons[,variations] [--input_flags]` - -*or* - -`cmr "flash tiny mlperf mlcommons"` - -`cmr "flash tiny mlperf mlcommons [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run', - 'automation':'script', - 'tags':'flash,tiny,mlperf,mlcommons', - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="flash,tiny,mlperf,mlcommons"``` - -#### Run this script via Docker (beta) - -`cm docker script "flash tiny mlperf mlcommons[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_NRF` - - Workflow: - * `_NUCLEO` - - Workflow: - * `_ad` - - Workflow: - * `_cmsis_nn` - - Workflow: - * `_ic` - - Workflow: - * `_kws` - - Workflow: - * `_native` - - Workflow: - * `_vww` - - Workflow: - -
    - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--build_dir=value` → `CM_TINY_BUILD_DIR=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "build_dir":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `r1.0` - -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/flash-tinyml-binary/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - * get,zephyr - * CM names: `--adr.['zephyr']...` - - CM script: [get-zephyr](https://github.com/mlcommons/cm4mlops/tree/master/script/get-zephyr) - * get,zephyr-sdk - * CM names: `--adr.['zephyr-sdk']...` - - CM script: [get-zephyr-sdk](https://github.com/mlcommons/cm4mlops/tree/master/script/get-zephyr-sdk) - * reproduce,tiny,mlperf - * Skip this dependency only if all ENV vars are set:
    -`{'CM_TINY_BUILD_DIR': ['on']}` - - CM script: [reproduce-mlperf-octoml-tinyml-results](https://github.com/mlcommons/cm4mlops/tree/master/script/reproduce-mlperf-octoml-tinyml-results) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/flash-tinyml-binary/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/flash-tinyml-binary/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/flash-tinyml-binary/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/flash-tinyml-binary/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/flash-tinyml-binary/customize.py)*** - 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/flash-tinyml-binary/_cm.json) - -___ -### Script output -`cmr "flash tiny mlperf mlcommons [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -#### New environment keys auto-detected from customize diff --git a/docs/TinyML-automation/get-microtvm/index.md b/docs/TinyML-automation/get-microtvm/index.md deleted file mode 100644 index 54ad7bfa4..000000000 --- a/docs/TinyML-automation/get-microtvm/index.md +++ /dev/null @@ -1,162 +0,0 @@ -Automatically generated README for this automation recipe: **get-microtvm** - -Category: **TinyML automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-microtvm,a9cad70972a140b9) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM 
GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-microtvm)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,src,source,microtvm,tiny* -* Output cached? *True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get src source microtvm tiny" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,src,source,microtvm,tiny` - -`cm run script --tags=get,src,source,microtvm,tiny[,variations] [--input_flags]` - -*or* - -`cmr "get src source microtvm tiny"` - -`cmr "get src source microtvm tiny [variations]" [--input_flags]` - - -* *See the list of `variations` [here](#variations) and check the [Gettings Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.* - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,src,source,microtvm,tiny' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,src,source,microtvm,tiny"``` - -#### Run this script via Docker (beta) - -`cm docker script "get src source microtvm tiny[variations]" [--input_flags]` - -___ -### Customization - - -#### Variations - - * *No group (any variation can be selected)* -
    - Click here to expand this section. - - * `_full-history` - - Environment variables: - - *CM_GIT_DEPTH*: `--depth 10` - - Workflow: - * `_short-history` - - Environment variables: - - *CM_GIT_DEPTH*: `--depth 10` - - Workflow: - -
    - - -#### Script flags mapped to environment -
    -Click here to expand this section. - -* `--ssh=value` → `CM_GIT_SSH=value` - -**Above CLI flags can be used in the Python CM API as follows:** - -```python -r=cm.access({... , "ssh":...} -``` - -
    - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `main` - -* `custom` -* `main` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-microtvm/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-microtvm/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-microtvm/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-microtvm/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-microtvm/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-microtvm/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-microtvm/_cm.json) - -___ -### Script output -`cmr "get src source microtvm tiny [,variations]" [--input_flags] -j` -#### New environment keys (filter) - -* `CM_MICROTVM_*` -#### New environment keys auto-detected from customize - -* `CM_MICROTVM_SOURCE` \ No newline at end of file diff --git a/docs/TinyML-automation/get-zephyr-sdk/index.md b/docs/TinyML-automation/get-zephyr-sdk/index.md deleted file mode 100644 index 07c2df73b..000000000 --- a/docs/TinyML-automation/get-zephyr-sdk/index.md +++ /dev/null @@ -1,126 +0,0 @@ -Automatically generated README for this automation recipe: **get-zephyr-sdk** - -Category: **TinyML automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-zephyr-sdk,c70ae1a7567f4a7b) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr-sdk)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,zephyr-sdk* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get zephyr-sdk" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,zephyr-sdk` - -`cm run script --tags=get,zephyr-sdk ` - -*or* - -`cmr "get zephyr-sdk"` - -`cmr "get zephyr-sdk " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,zephyr-sdk' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,zephyr-sdk"``` - -#### Run this script via Docker (beta) - -`cm docker script "get zephyr-sdk" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `0.13.2` - -* `0.13.1` -* `0.13.2` -* `0.15.0` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr-sdk/_cm.json)*** - * detect,os - - CM script: [detect-os](https://github.com/mlcommons/cm4mlops/tree/master/script/detect-os) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr-sdk/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr-sdk/_cm.json) - 1. ***Run native script if exists*** - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr-sdk/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr-sdk/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr-sdk/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr-sdk/_cm.json) - -___ -### Script output -`cmr "get zephyr-sdk " -j` -#### New environment keys (filter) - -* `ZEPHYR_*` -#### New environment keys auto-detected from customize diff --git a/docs/TinyML-automation/get-zephyr/index.md b/docs/TinyML-automation/get-zephyr/index.md deleted file mode 100644 index e36cd805c..000000000 --- a/docs/TinyML-automation/get-zephyr/index.md +++ /dev/null @@ -1,132 +0,0 @@ -Automatically generated README for this automation recipe: **get-zephyr** - -Category: **TinyML automation** - -License: **Apache 2.0** - -Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) - ---- -*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-zephyr,d4105c2cdb044276) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* - ---- -#### Summary - -* CM GitHub repository: *[mlcommons@cm4mlops](https://github.com/mlcommons/cm4mlops/tree/dev)* -* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr)* -* CM meta description for this script: *[_cm.json](_cm.json)* -* All CM tags to find and reuse this script (see in above meta description): *get,zephyr* -* Output cached? 
*True* -* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts - - ---- -### Reuse this script in your project - -#### Install MLCommons CM automation meta-framework - -* [Install CM](https://access.cknowledge.org/playground/?action=install) -* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md) - -#### Pull CM repository with this automation recipe (CM script) - -```cm pull repo mlcommons@cm4mlops``` - -#### Print CM help from the command line - -````cmr "get zephyr" --help```` - -#### Customize and run this script from the command line with different variations and flags - -`cm run script --tags=get,zephyr` - -`cm run script --tags=get,zephyr ` - -*or* - -`cmr "get zephyr"` - -`cmr "get zephyr " ` - - -#### Run this script from Python - -
    -Click here to expand this section. - -```python - -import cmind - -r = cmind.access({'action':'run' - 'automation':'script', - 'tags':'get,zephyr' - 'out':'con', - ... - (other input keys for this script) - ... - }) - -if r['return']>0: - print (r['error']) - -``` - -
    - - -#### Run this script via GUI - -```cmr "cm gui" --script="get,zephyr"``` - -#### Run this script via Docker (beta) - -`cm docker script "get zephyr" ` - -___ -### Customization - -#### Default environment - -
    -Click here to expand this section. - -These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. - - -
    - -#### Versions -Default version: `v2.7` - -* `v2.7` -___ -### Dependencies on other CM scripts - - - 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr/_cm.json)*** - * get,python3 - * CM names: `--adr.['python3', 'python']...` - - CM script: [get-python3](https://github.com/mlcommons/cm4mlops/tree/master/script/get-python3) - * get,cmake - - CM script: [get-cmake](https://github.com/mlcommons/cm4mlops/tree/master/script/get-cmake) - * get,generic-python-lib,_west - - CM script: [get-generic-python-lib](https://github.com/mlcommons/cm4mlops/tree/master/script/get-generic-python-lib) - 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr/customize.py)*** - 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr/_cm.json) - 1. ***Run native script if exists*** - * [run-ubuntu.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr/run-ubuntu.sh) - * [run.sh](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr/run.sh) - 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr/_cm.json) - 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr/customize.py)*** - 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/cm4mlops/tree/dev/script/get-zephyr/_cm.json) - -___ -### Script output -`cmr "get zephyr " -j` -#### New environment keys (filter) - -* `CM_ZEPHYR_*` -#### New environment keys auto-detected from customize - -* `CM_ZEPHYR_DIR` \ No newline at end of file diff --git a/docs/TinyML-automation/index.md b/docs/TinyML-automation/index.md new file mode 100644 index 000000000..b45795983 --- /dev/null +++ b/docs/TinyML-automation/index.md @@ -0,0 +1,8 @@ +The TinyML automation category contains the following scripts: + +- [create-fpgaconvnet-app-tinyml](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/create-fpgaconvnet-app-tinyml/README.md) +- [create-fpgaconvnet-config-tinyml](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/create-fpgaconvnet-config-tinyml/README.md) +- [flash-tinyml-binary](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/flash-tinyml-binary/README.md) +- [get-microtvm](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-microtvm/README.md) +- [get-zephyr](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-zephyr/README.md) +- [get-zephyr-sdk](https://github.com/anandhu-eng/cm4mlops/tree/mlperf-inference/script/get-zephyr-sdk/README.md) diff --git a/docs/index.md b/docs/index.md index 32d46ee88..b65cc02b9 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1 +1,51 @@ -../README.md \ No newline at end of file +## Unified and cross-platform CM interface for DevOps, MLOps and MLPerf + +[![License](https://img.shields.io/badge/License-Apache%202.0-green)](LICENSE.md) +[![Python Version](https://img.shields.io/badge/python-3+-blue.svg)](https://github.com/mlcommons/ck/tree/master/cm/cmind) +[![Powered by CM](https://img.shields.io/badge/Powered_by-MLCommons%20CM-blue)](https://github.com/mlcommons/ck). 
+[![Downloads](https://static.pepy.tech/badge/cmind)](https://pepy.tech/project/cmind) + +This repository contains reusable and cross-platform automation recipes to run DevOps, MLOps, AIOps and MLPerf +via a simple and human-readable [Collective Mind interface (CM)](https://github.com/mlcommons/ck) +while adapting to different operating systems, software and hardware. + +All CM scripts have a simple Python API, extensible JSON/YAML meta description +and unified input/output to make them reusable in different projects either individually +or by chaining them together into portable automation workflows, applications +and web services adaptable to continuously changing models, data sets, software and hardware. + +These automation recipes are being developed and maintained +by the [MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) +with [great contributions](CONTRIBUTING.md) from the community. + +## Tests + +[![CM script automation test](https://github.com/mlcommons/cm4mlops/actions/workflows/test-cm-scripts.yml/badge.svg)](https://github.com/mlcommons/cm4mlops/actions/workflows/test-cm-scripts.yml) +[![CM script automation features test](https://github.com/mlcommons/cm4mlops/actions/workflows/test-cm-script-features.yml/badge.svg)](https://github.com/mlcommons/cm4mlops/actions/workflows/test-cm-script-features.yml) +[![MLPerf loadgen with HuggingFace bert onnx fp32 squad model](https://github.com/mlcommons/cm4mlops/actions/workflows/test-mlperf-loadgen-onnx-huggingface-bert-fp32-squad.yml/badge.svg)](https://github.com/mlcommons/cm4mlops/actions/workflows/test-mlperf-loadgen-onnx-huggingface-bert-fp32-squad.yml) +[![MLPerf inference MLCommons C++ ResNet50](https://github.com/mlcommons/cm4mlops/actions/workflows/test-mlperf-inference-mlcommons-cpp-resnet50.yml/badge.svg)](https://github.com/mlcommons/cm4mlops/actions/workflows/test-mlperf-inference-mlcommons-cpp-resnet50.yml) +[![image classification 
with ONNX](https://github.com/mlcommons/cm4mlops/actions/workflows/test-image-classification-onnx.yml/badge.svg)](https://github.com/mlcommons/cm4mlops/actions/workflows/test-image-classification-onnx.yml) + + +## Catalog + +See the automatically generated catalog [online](https://access.cknowledge.org/playground/?action=scripts). + +## License + +[Apache 2.0](LICENSE.md) + +## Copyright + +2022-2024 [MLCommons](https://mlcommons.org) + +## Acknowledgments + +This open-source technology is being developed by the [MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) +as a community effort based on user feedback. + +We would like to thank all [volunteers, collaborators and contributors](CONTRIBUTING.md) +for their support, fruitful discussions, and useful feedback! + +We thank the [cTuning foundation](https://cTuning.org), [cKnowledge.org](https://cKnowledge.org) +and [MLCommons](https://mlcommons.org) for sponsoring this project! 
diff --git a/mkdocs.yml b/mkdocs.yml index cea23b114..faf15349e 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -19,293 +19,38 @@ theme: - navigation.top - toc.follow nav: - - CM Scripts: + - CM Scripts: - index.md - - Python automation: - - activate-python-venv: Python-automation/activate-python-venv/index.md - - get-generic-python-lib: Python-automation/get-generic-python-lib/index.md - - get-python3: Python-automation/get-python3/index.md - - install-generic-conda-package: Python-automation/install-generic-conda-package/index.md - - install-python-src: Python-automation/install-python-src/index.md - - install-python-venv: Python-automation/install-python-venv/index.md - - MLPerf benchmark support: - - add-custom-nvidia-system: MLPerf-benchmark-support/add-custom-nvidia-system/index.md - - benchmark-any-mlperf-inference-implementation: MLPerf-benchmark-support/benchmark-any-mlperf-inference-implementation/index.md - - build-mlperf-inference-server-nvidia: MLPerf-benchmark-support/build-mlperf-inference-server-nvidia/index.md - - generate-mlperf-inference-submission: MLPerf-benchmark-support/generate-mlperf-inference-submission/index.md - - generate-mlperf-inference-user-conf: MLPerf-benchmark-support/generate-mlperf-inference-user-conf/index.md - - generate-mlperf-tiny-report: MLPerf-benchmark-support/generate-mlperf-tiny-report/index.md - - generate-mlperf-tiny-submission: MLPerf-benchmark-support/generate-mlperf-tiny-submission/index.md - - generate-nvidia-engine: MLPerf-benchmark-support/generate-nvidia-engine/index.md - - get-mlperf-inference-intel-scratch-space: MLPerf-benchmark-support/get-mlperf-inference-intel-scratch-space/index.md - - get-mlperf-inference-loadgen: MLPerf-benchmark-support/get-mlperf-inference-loadgen/index.md - - get-mlperf-inference-nvidia-common-code: MLPerf-benchmark-support/get-mlperf-inference-nvidia-common-code/index.md - - get-mlperf-inference-nvidia-scratch-space: 
MLPerf-benchmark-support/get-mlperf-inference-nvidia-scratch-space/index.md - - get-mlperf-inference-results: MLPerf-benchmark-support/get-mlperf-inference-results/index.md - - get-mlperf-inference-results-dir: MLPerf-benchmark-support/get-mlperf-inference-results-dir/index.md - - get-mlperf-inference-src: MLPerf-benchmark-support/get-mlperf-inference-src/index.md - - get-mlperf-inference-submission-dir: MLPerf-benchmark-support/get-mlperf-inference-submission-dir/index.md - - get-mlperf-inference-sut-configs: MLPerf-benchmark-support/get-mlperf-inference-sut-configs/index.md - - get-mlperf-inference-sut-description: MLPerf-benchmark-support/get-mlperf-inference-sut-description/index.md - - get-mlperf-logging: MLPerf-benchmark-support/get-mlperf-logging/index.md - - get-mlperf-power-dev: MLPerf-benchmark-support/get-mlperf-power-dev/index.md - - get-mlperf-tiny-eembc-energy-runner-src: MLPerf-benchmark-support/get-mlperf-tiny-eembc-energy-runner-src/index.md - - get-mlperf-tiny-src: MLPerf-benchmark-support/get-mlperf-tiny-src/index.md - - get-mlperf-training-nvidia-code: MLPerf-benchmark-support/get-mlperf-training-nvidia-code/index.md - - get-mlperf-training-src: MLPerf-benchmark-support/get-mlperf-training-src/index.md - - get-nvidia-mitten: MLPerf-benchmark-support/get-nvidia-mitten/index.md - - get-spec-ptd: MLPerf-benchmark-support/get-spec-ptd/index.md - - import-mlperf-inference-to-experiment: MLPerf-benchmark-support/import-mlperf-inference-to-experiment/index.md - - import-mlperf-tiny-to-experiment: MLPerf-benchmark-support/import-mlperf-tiny-to-experiment/index.md - - import-mlperf-training-to-experiment: MLPerf-benchmark-support/import-mlperf-training-to-experiment/index.md - - install-mlperf-logging-from-src: MLPerf-benchmark-support/install-mlperf-logging-from-src/index.md - - prepare-training-data-bert: MLPerf-benchmark-support/prepare-training-data-bert/index.md - - prepare-training-data-resnet: 
MLPerf-benchmark-support/prepare-training-data-resnet/index.md - - preprocess-mlperf-inference-submission: MLPerf-benchmark-support/preprocess-mlperf-inference-submission/index.md - - process-mlperf-accuracy: MLPerf-benchmark-support/process-mlperf-accuracy/index.md - - push-mlperf-inference-results-to-github: MLPerf-benchmark-support/push-mlperf-inference-results-to-github/index.md - - run-all-mlperf-models: MLPerf-benchmark-support/run-all-mlperf-models/index.md - - run-mlperf-inference-mobilenet-models: MLPerf-benchmark-support/run-mlperf-inference-mobilenet-models/index.md - - run-mlperf-inference-submission-checker: MLPerf-benchmark-support/run-mlperf-inference-submission-checker/index.md - - run-mlperf-power-client: MLPerf-benchmark-support/run-mlperf-power-client/index.md - - run-mlperf-power-server: MLPerf-benchmark-support/run-mlperf-power-server/index.md - - run-mlperf-training-submission-checker: MLPerf-benchmark-support/run-mlperf-training-submission-checker/index.md - - truncate-mlperf-inference-accuracy-log: MLPerf-benchmark-support/truncate-mlperf-inference-accuracy-log/index.md - - Modular AI-ML application pipeline: - - app-image-classification-onnx-py: Modular-AI-ML-application-pipeline/app-image-classification-onnx-py/index.md - - app-image-classification-tf-onnx-cpp: Modular-AI-ML-application-pipeline/app-image-classification-tf-onnx-cpp/index.md - - app-image-classification-torch-py: Modular-AI-ML-application-pipeline/app-image-classification-torch-py/index.md - - app-image-classification-tvm-onnx-py: Modular-AI-ML-application-pipeline/app-image-classification-tvm-onnx-py/index.md - - app-stable-diffusion-onnx-py: Modular-AI-ML-application-pipeline/app-stable-diffusion-onnx-py/index.md - - Modular application pipeline: - - app-image-corner-detection: Modular-application-pipeline/app-image-corner-detection/index.md - - Modular MLPerf inference benchmark pipeline: - - app-loadgen-generic-python: 
Modular-MLPerf-inference-benchmark-pipeline/app-loadgen-generic-python/index.md - - app-mlperf-inference: Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference/index.md - - app-mlperf-inference-ctuning-cpp-tflite: Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-ctuning-cpp-tflite/index.md - - app-mlperf-inference-mlcommons-cpp: Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-cpp/index.md - - app-mlperf-inference-mlcommons-python: Modular-MLPerf-inference-benchmark-pipeline/app-mlperf-inference-mlcommons-python/index.md - - benchmark-program-mlperf: Modular-MLPerf-inference-benchmark-pipeline/benchmark-program-mlperf/index.md - - run-mlperf-inference-app: Modular-MLPerf-inference-benchmark-pipeline/run-mlperf-inference-app/index.md - - Modular MLPerf benchmarks: - - app-mlperf-inference-dummy: Modular-MLPerf-benchmarks/app-mlperf-inference-dummy/index.md - - app-mlperf-inference-intel: Modular-MLPerf-benchmarks/app-mlperf-inference-intel/index.md - - app-mlperf-inference-qualcomm: Modular-MLPerf-benchmarks/app-mlperf-inference-qualcomm/index.md - - Reproduce MLPerf benchmarks: - - app-mlperf-inference-nvidia: Reproduce-MLPerf-benchmarks/app-mlperf-inference-nvidia/index.md - - reproduce-mlperf-octoml-tinyml-results: Reproduce-MLPerf-benchmarks/reproduce-mlperf-octoml-tinyml-results/index.md - - reproduce-mlperf-training-nvidia: Reproduce-MLPerf-benchmarks/reproduce-mlperf-training-nvidia/index.md - - wrapper-reproduce-octoml-tinyml-submission: Reproduce-MLPerf-benchmarks/wrapper-reproduce-octoml-tinyml-submission/index.md - - Modular MLPerf training benchmark pipeline: - - app-mlperf-training-nvidia: Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-nvidia/index.md - - app-mlperf-training-reference: Modular-MLPerf-training-benchmark-pipeline/app-mlperf-training-reference/index.md - - DevOps automation: - - benchmark-program: DevOps-automation/benchmark-program/index.md - - compile-program: 
DevOps-automation/compile-program/index.md - - convert-csv-to-md: DevOps-automation/convert-csv-to-md/index.md - - copy-to-clipboard: DevOps-automation/copy-to-clipboard/index.md - - create-conda-env: DevOps-automation/create-conda-env/index.md - - create-patch: DevOps-automation/create-patch/index.md - - detect-sudo: DevOps-automation/detect-sudo/index.md - - download-and-extract: DevOps-automation/download-and-extract/index.md - - download-file: DevOps-automation/download-file/index.md - - download-torrent: DevOps-automation/download-torrent/index.md - - extract-file: DevOps-automation/extract-file/index.md - - fail: DevOps-automation/fail/index.md - - get-conda: DevOps-automation/get-conda/index.md - - get-git-repo: DevOps-automation/get-git-repo/index.md - - get-github-cli: DevOps-automation/get-github-cli/index.md - - pull-git-repo: DevOps-automation/pull-git-repo/index.md - - push-csv-to-spreadsheet: DevOps-automation/push-csv-to-spreadsheet/index.md - - set-device-settings-qaic: DevOps-automation/set-device-settings-qaic/index.md - - set-echo-off-win: DevOps-automation/set-echo-off-win/index.md - - set-performance-mode: DevOps-automation/set-performance-mode/index.md - - set-sqlite-dir: DevOps-automation/set-sqlite-dir/index.md - - tar-my-folder: DevOps-automation/tar-my-folder/index.md - - Docker automation: - - build-docker-image: Docker-automation/build-docker-image/index.md - - build-dockerfile: Docker-automation/build-dockerfile/index.md - - prune-docker: Docker-automation/prune-docker/index.md - - run-docker-container: Docker-automation/run-docker-container/index.md - - AI-ML optimization: - - calibrate-model-for.qaic: AI-ML-optimization/calibrate-model-for.qaic/index.md - - compile-model-for.qaic: AI-ML-optimization/compile-model-for.qaic/index.md - - prune-bert-models: AI-ML-optimization/prune-bert-models/index.md - - AI-ML models: - - convert-ml-model-huggingface-to-onnx: AI-ML-models/convert-ml-model-huggingface-to-onnx/index.md - - 
get-bert-squad-vocab: AI-ML-models/get-bert-squad-vocab/index.md - - get-dlrm: AI-ML-models/get-dlrm/index.md - - get-ml-model-3d-unet-kits19: AI-ML-models/get-ml-model-3d-unet-kits19/index.md - - get-ml-model-bert-base-squad: AI-ML-models/get-ml-model-bert-base-squad/index.md - - get-ml-model-bert-large-squad: AI-ML-models/get-ml-model-bert-large-squad/index.md - - get-ml-model-dlrm-terabyte: AI-ML-models/get-ml-model-dlrm-terabyte/index.md - - get-ml-model-efficientnet-lite: AI-ML-models/get-ml-model-efficientnet-lite/index.md - - get-ml-model-gptj: AI-ML-models/get-ml-model-gptj/index.md - - get-ml-model-huggingface-zoo: AI-ML-models/get-ml-model-huggingface-zoo/index.md - - get-ml-model-llama2: AI-ML-models/get-ml-model-llama2/index.md - - get-ml-model-mobilenet: AI-ML-models/get-ml-model-mobilenet/index.md - - get-ml-model-neuralmagic-zoo: AI-ML-models/get-ml-model-neuralmagic-zoo/index.md - - get-ml-model-resnet50: AI-ML-models/get-ml-model-resnet50/index.md - - get-ml-model-retinanet: AI-ML-models/get-ml-model-retinanet/index.md - - get-ml-model-retinanet-nvidia: AI-ML-models/get-ml-model-retinanet-nvidia/index.md - - get-ml-model-rnnt: AI-ML-models/get-ml-model-rnnt/index.md - - get-ml-model-stable-diffusion: AI-ML-models/get-ml-model-stable-diffusion/index.md - - get-ml-model-tiny-resnet: AI-ML-models/get-ml-model-tiny-resnet/index.md - - get-ml-model-using-imagenet-from-model-zoo: AI-ML-models/get-ml-model-using-imagenet-from-model-zoo/index.md - - get-tvm-model: AI-ML-models/get-tvm-model/index.md - - CM automation: - - create-custom-cache-entry: CM-automation/create-custom-cache-entry/index.md - - TinyML automation: - - create-fpgaconvnet-app-tinyml: TinyML-automation/create-fpgaconvnet-app-tinyml/index.md - - create-fpgaconvnet-config-tinyml: TinyML-automation/create-fpgaconvnet-config-tinyml/index.md - - flash-tinyml-binary: TinyML-automation/flash-tinyml-binary/index.md - - get-microtvm: TinyML-automation/get-microtvm/index.md - - get-zephyr: 
TinyML-automation/get-zephyr/index.md - - get-zephyr-sdk: TinyML-automation/get-zephyr-sdk/index.md - - Cloud automation: - - destroy-terraform: Cloud-automation/destroy-terraform/index.md - - get-aws-cli: Cloud-automation/get-aws-cli/index.md - - get-terraform: Cloud-automation/get-terraform/index.md - - install-aws-cli: Cloud-automation/install-aws-cli/index.md - - install-terraform-from-src: Cloud-automation/install-terraform-from-src/index.md - - run-terraform: Cloud-automation/run-terraform/index.md - - Platform information: - - detect-cpu: Platform-information/detect-cpu/index.md - - detect-os: Platform-information/detect-os/index.md - - Detection or installation of tools and artifacts: - - get-android-sdk: Detection-or-installation-of-tools-and-artifacts/get-android-sdk/index.md - - get-aria2: Detection-or-installation-of-tools-and-artifacts/get-aria2/index.md - - get-bazel: Detection-or-installation-of-tools-and-artifacts/get-bazel/index.md - - get-blis: Detection-or-installation-of-tools-and-artifacts/get-blis/index.md - - get-brew: Detection-or-installation-of-tools-and-artifacts/get-brew/index.md - - get-cmake: Detection-or-installation-of-tools-and-artifacts/get-cmake/index.md - - get-cmsis_5: Detection-or-installation-of-tools-and-artifacts/get-cmsis_5/index.md - - get-docker: Detection-or-installation-of-tools-and-artifacts/get-docker/index.md - - get-generic-sys-util: Detection-or-installation-of-tools-and-artifacts/get-generic-sys-util/index.md - - get-google-test: Detection-or-installation-of-tools-and-artifacts/get-google-test/index.md - - get-java: Detection-or-installation-of-tools-and-artifacts/get-java/index.md - - get-javac: Detection-or-installation-of-tools-and-artifacts/get-javac/index.md - - get-lib-armnn: Detection-or-installation-of-tools-and-artifacts/get-lib-armnn/index.md - - get-lib-dnnl: Detection-or-installation-of-tools-and-artifacts/get-lib-dnnl/index.md - - get-lib-protobuf: 
Detection-or-installation-of-tools-and-artifacts/get-lib-protobuf/index.md - - get-lib-qaic-api: Detection-or-installation-of-tools-and-artifacts/get-lib-qaic-api/index.md - - get-nvidia-docker: Detection-or-installation-of-tools-and-artifacts/get-nvidia-docker/index.md - - get-openssl: Detection-or-installation-of-tools-and-artifacts/get-openssl/index.md - - get-rclone: Detection-or-installation-of-tools-and-artifacts/get-rclone/index.md - - get-sys-utils-cm: Detection-or-installation-of-tools-and-artifacts/get-sys-utils-cm/index.md - - get-sys-utils-min: Detection-or-installation-of-tools-and-artifacts/get-sys-utils-min/index.md - - get-xilinx-sdk: Detection-or-installation-of-tools-and-artifacts/get-xilinx-sdk/index.md - - get-zendnn: Detection-or-installation-of-tools-and-artifacts/get-zendnn/index.md - - install-bazel: Detection-or-installation-of-tools-and-artifacts/install-bazel/index.md - - install-cmake-prebuilt: Detection-or-installation-of-tools-and-artifacts/install-cmake-prebuilt/index.md - - install-gflags: Detection-or-installation-of-tools-and-artifacts/install-gflags/index.md - - install-github-cli: Detection-or-installation-of-tools-and-artifacts/install-github-cli/index.md - - install-numactl-from-src: Detection-or-installation-of-tools-and-artifacts/install-numactl-from-src/index.md - - install-openssl: Detection-or-installation-of-tools-and-artifacts/install-openssl/index.md - - Compiler automation: - - get-aocl: Compiler-automation/get-aocl/index.md - - get-cl: Compiler-automation/get-cl/index.md - - get-compiler-flags: Compiler-automation/get-compiler-flags/index.md - - get-compiler-rust: Compiler-automation/get-compiler-rust/index.md - - get-gcc: Compiler-automation/get-gcc/index.md - - get-go: Compiler-automation/get-go/index.md - - get-llvm: Compiler-automation/get-llvm/index.md - - install-gcc-src: Compiler-automation/install-gcc-src/index.md - - install-ipex-from-src: Compiler-automation/install-ipex-from-src/index.md - - 
install-llvm-prebuilt: Compiler-automation/install-llvm-prebuilt/index.md - - install-llvm-src: Compiler-automation/install-llvm-src/index.md - - install-onednn-from-src: Compiler-automation/install-onednn-from-src/index.md - - install-onnxruntime-from-src: Compiler-automation/install-onnxruntime-from-src/index.md - - install-pytorch-from-src: Compiler-automation/install-pytorch-from-src/index.md - - install-pytorch-kineto-from-src: Compiler-automation/install-pytorch-kineto-from-src/index.md - - install-torchvision-from-src: Compiler-automation/install-torchvision-from-src/index.md - - install-tpp-pytorch-extension: Compiler-automation/install-tpp-pytorch-extension/index.md - - install-transformers-from-src: Compiler-automation/install-transformers-from-src/index.md - - CM Interface: - - get-cache-dir: CM-Interface/get-cache-dir/index.md - - Legacy CK support: - - get-ck: Legacy-CK-support/get-ck/index.md - - get-ck-repo-mlops: Legacy-CK-support/get-ck-repo-mlops/index.md - - AI-ML datasets: - - get-croissant: AI-ML-datasets/get-croissant/index.md - - get-dataset-cifar10: AI-ML-datasets/get-dataset-cifar10/index.md - - get-dataset-cnndm: AI-ML-datasets/get-dataset-cnndm/index.md - - get-dataset-coco: AI-ML-datasets/get-dataset-coco/index.md - - get-dataset-coco2014: AI-ML-datasets/get-dataset-coco2014/index.md - - get-dataset-criteo: AI-ML-datasets/get-dataset-criteo/index.md - - get-dataset-imagenet-aux: AI-ML-datasets/get-dataset-imagenet-aux/index.md - - get-dataset-imagenet-calibration: AI-ML-datasets/get-dataset-imagenet-calibration/index.md - - get-dataset-imagenet-helper: AI-ML-datasets/get-dataset-imagenet-helper/index.md - - get-dataset-imagenet-train: AI-ML-datasets/get-dataset-imagenet-train/index.md - - get-dataset-imagenet-val: AI-ML-datasets/get-dataset-imagenet-val/index.md - - get-dataset-kits19: AI-ML-datasets/get-dataset-kits19/index.md - - get-dataset-librispeech: AI-ML-datasets/get-dataset-librispeech/index.md - - get-dataset-openimages: 
AI-ML-datasets/get-dataset-openimages/index.md - - get-dataset-openimages-annotations: AI-ML-datasets/get-dataset-openimages-annotations/index.md - - get-dataset-openimages-calibration: AI-ML-datasets/get-dataset-openimages-calibration/index.md - - get-dataset-openorca: AI-ML-datasets/get-dataset-openorca/index.md - - get-dataset-squad: AI-ML-datasets/get-dataset-squad/index.md - - get-dataset-squad-vocab: AI-ML-datasets/get-dataset-squad-vocab/index.md - - get-preprocessed-dataset-criteo: AI-ML-datasets/get-preprocessed-dataset-criteo/index.md - - get-preprocessed-dataset-generic: AI-ML-datasets/get-preprocessed-dataset-generic/index.md - - get-preprocessed-dataset-imagenet: AI-ML-datasets/get-preprocessed-dataset-imagenet/index.md - - get-preprocessed-dataset-kits19: AI-ML-datasets/get-preprocessed-dataset-kits19/index.md - - get-preprocessed-dataset-librispeech: AI-ML-datasets/get-preprocessed-dataset-librispeech/index.md - - get-preprocessed-dataset-openimages: AI-ML-datasets/get-preprocessed-dataset-openimages/index.md - - get-preprocessed-dataset-openorca: AI-ML-datasets/get-preprocessed-dataset-openorca/index.md - - get-preprocessed-dataset-squad: AI-ML-datasets/get-preprocessed-dataset-squad/index.md - - CUDA automation: - - get-cuda: CUDA-automation/get-cuda/index.md - - get-cuda-devices: CUDA-automation/get-cuda-devices/index.md - - get-cudnn: CUDA-automation/get-cudnn/index.md - - get-tensorrt: CUDA-automation/get-tensorrt/index.md - - install-cuda-package-manager: CUDA-automation/install-cuda-package-manager/index.md - - install-cuda-prebuilt: CUDA-automation/install-cuda-prebuilt/index.md - - AI-ML frameworks: - - get-google-saxml: AI-ML-frameworks/get-google-saxml/index.md - - get-onnxruntime-prebuilt: AI-ML-frameworks/get-onnxruntime-prebuilt/index.md - - get-qaic-apps-sdk: AI-ML-frameworks/get-qaic-apps-sdk/index.md - - get-qaic-platform-sdk: AI-ML-frameworks/get-qaic-platform-sdk/index.md - - get-qaic-software-kit: 
AI-ML-frameworks/get-qaic-software-kit/index.md - - get-rocm: AI-ML-frameworks/get-rocm/index.md - - get-tvm: AI-ML-frameworks/get-tvm/index.md - - install-qaic-compute-sdk-from-src: AI-ML-frameworks/install-qaic-compute-sdk-from-src/index.md - - install-rocm: AI-ML-frameworks/install-rocm/index.md - - install-tensorflow-for-c: AI-ML-frameworks/install-tensorflow-for-c/index.md - - install-tensorflow-from-src: AI-ML-frameworks/install-tensorflow-from-src/index.md - - install-tflite-from-src: AI-ML-frameworks/install-tflite-from-src/index.md - - Reproducibility and artifact evaluation: - - get-ipol-src: Reproducibility-and-artifact-evaluation/get-ipol-src/index.md - - process-ae-users: Reproducibility-and-artifact-evaluation/process-ae-users/index.md - - reproduce-ipol-paper-2022-439: Reproducibility-and-artifact-evaluation/reproduce-ipol-paper-2022-439/index.md - - reproduce-micro-paper-2023-victima: Reproducibility-and-artifact-evaluation/reproduce-micro-paper-2023-victima/index.md - - GUI: - - gui: GUI/gui/index.md - - Collective benchmarking: - - launch-benchmark: Collective-benchmarking/launch-benchmark/index.md - - Tests: - - print-any-text: Tests/print-any-text/index.md - - print-croissant-desc: Tests/print-croissant-desc/index.md - - print-hello-world: Tests/print-hello-world/index.md - - print-hello-world-java: Tests/print-hello-world-java/index.md - - print-hello-world-javac: Tests/print-hello-world-javac/index.md - - print-hello-world-py: Tests/print-hello-world-py/index.md - - print-python-version: Tests/print-python-version/index.md - - run-python: Tests/run-python/index.md - - test-cm-core: Tests/test-cm-core/index.md - - test-cm-script-pipeline: Tests/test-cm-script-pipeline/index.md - - test-deps-conditions: Tests/test-deps-conditions/index.md - - test-deps-conditions2: Tests/test-deps-conditions2/index.md - - test-download-and-extract-artifacts: Tests/test-download-and-extract-artifacts/index.md - - test-set-sys-user-cm: 
Tests/test-set-sys-user-cm/index.md - - upgrade-python-pip: Tests/upgrade-python-pip/index.md - - Dashboard automation: - - publish-results-to-dashboard: Dashboard-automation/publish-results-to-dashboard/index.md - - Remote automation: - - remote-run-commands: Remote-automation/remote-run-commands/index.md - - CM interface prototyping: - - test-debug: CM-interface-prototyping/test-debug/index.md - - test-mlperf-inference-retinanet: CM-interface-prototyping/test-mlperf-inference-retinanet/index.md + - Python-automation: Python-automation/index.md + - MLPerf-benchmark-support: MLPerf-benchmark-support/index.md + - Modular-AI-ML-application-pipeline: Modular-AI-ML-application-pipeline/index.md + - Modular-application-pipeline: Modular-application-pipeline/index.md + - Modular-MLPerf-inference-benchmark-pipeline: Modular-MLPerf-inference-benchmark-pipeline/index.md + - Modular-MLPerf-benchmarks: Modular-MLPerf-benchmarks/index.md + - Reproduce-MLPerf-benchmarks: Reproduce-MLPerf-benchmarks/index.md + - Modular-MLPerf-training-benchmark-pipeline: Modular-MLPerf-training-benchmark-pipeline/index.md + - DevOps-automation: DevOps-automation/index.md + - Docker-automation: Docker-automation/index.md + - AI-ML-optimization: AI-ML-optimization/index.md + - AI-ML-models: AI-ML-models/index.md + - CM-automation: CM-automation/index.md + - TinyML-automation: TinyML-automation/index.md + - Cloud-automation: Cloud-automation/index.md + - Platform-information: Platform-information/index.md + - Detection-or-installation-of-tools-and-artifacts: Detection-or-installation-of-tools-and-artifacts/index.md + - Compiler-automation: Compiler-automation/index.md + - CM-Interface: CM-Interface/index.md + - Legacy-CK-support: Legacy-CK-support/index.md + - AI-ML-datasets: AI-ML-datasets/index.md + - CUDA-automation: CUDA-automation/index.md + - AI-ML-frameworks: AI-ML-frameworks/index.md + - Reproducibility-and-artifact-evaluation: Reproducibility-and-artifact-evaluation/index.md + - GUI: 
import json
import os
import shutil


def get_category_from_file(file_path):
    """Return the 'category' field of a CM metadata file (_cm.json / _cm.yaml).

    Returns None when the extension is unsupported, the file cannot be
    parsed, or no 'category' key is present.
    """
    try:
        with open(file_path, 'r') as file:
            if file_path.endswith('.json'):
                data = json.load(file)
            elif file_path.endswith(('.yaml', '.yml')):
                # Lazy import: only require PyYAML when a YAML metadata
                # file is actually encountered (JSON-only repos need none).
                import yaml
                data = yaml.safe_load(file)
            else:
                return None
        return data.get('category')
    except Exception as e:
        # Best-effort scan: one malformed metadata file must not abort the run.
        print(f"Error reading {file_path}: {e}")
        return None


def scan_folders(parent_folder):
    """Group script folders under <parent_folder>/script by declared category.

    Returns a dict mapping category name -> list of script folder names.
    Folders without a _cm.json/_cm.yaml or without a category are skipped.
    """
    category_dict = {}
    script_root = os.path.join(parent_folder, "script")
    for folder_name in os.listdir(script_root):
        folder_path = os.path.join(script_root, folder_name)
        if not os.path.isdir(folder_path):
            continue
        json_path = os.path.join(folder_path, '_cm.json')
        yaml_path = os.path.join(folder_path, '_cm.yaml')
        category = None
        if os.path.isfile(json_path):
            category = get_category_from_file(json_path)
        elif os.path.isfile(yaml_path):
            category = get_category_from_file(yaml_path)
        if category:
            category_dict.setdefault(category, []).append(folder_name)
    return category_dict


def _category_dir(category):
    # docs/<category> with '/' and ' ' replaced by '-' so the category is a
    # single, filesystem-safe path component.
    return os.path.join("docs", category.replace("/", "-")).replace(" ", "-")


def _copy_readmes(category_dict):
    """Copy script/<name>/README.md to docs/<category>/<name>/index.md."""
    for category, folders in category_dict.items():
        category_dir = _category_dir(category)
        os.makedirs(category_dir, exist_ok=True)
        for folder in folders:
            folder_name = folder.replace("/", "-")
            source_file_path = os.path.join("script", folder_name, "README.md")
            target_path = os.path.join(category_dir, folder_name, "index.md")
            if not os.path.exists(source_file_path):
                # Script ships no README; nothing to publish for it.
                continue
            os.makedirs(os.path.dirname(target_path), exist_ok=True)
            if os.path.exists(target_path):
                # Already copied on a previous run; keep the existing page.
                continue
            try:
                print(source_file_path)
                print(target_path)
                print(os.getcwd())
                shutil.copyfile(source_file_path, target_path)
            except OSError as e:
                # Fixed message: this is a copy, not a symlink.
                print(f"Failed to copy README: {e}")


def _print_nav(category_dict):
    """Print the mkdocs 'nav' YAML fragment for the copied pages."""
    print(" - CM Scripts:")
    for category, folders in category_dict.items():
        # Nav label keeps spaces; the on-disk path replaces them with dashes.
        label = category.replace("/", "-")
        # Fixed: the original nested double quotes inside an f-string here,
        # a SyntaxError on Python < 3.12 (PEP 701).
        path_dir = label.replace(" ", "-")
        print(f" - {label}:")
        for folder in folders:
            folder_name = folder.replace("/", "-")
            target_path = os.path.join(path_dir, folder_name, "index.md")
            print(f" - {folder_name}: {target_path}")


def print_category_structure(category_dict):
    """Materialize docs/ pages for each script and print the mkdocs nav.

    First copies every script README into docs/<category>/<script>/index.md,
    then prints the matching YAML nav fragment to stdout. Paths are resolved
    relative to the current working directory.
    """
    _copy_readmes(category_dict)
    _print_nav(category_dict)


if __name__ == "__main__":
    # Empty string: look for ./script relative to the current directory.
    parent_folder = r""
    category_dict = scan_folders(parent_folder)
    print_category_structure(category_dict)