├── .github ├── CODEOWNERS └── workflows │ ├── build_wheel.yml │ ├── check-broken-links.yml │ ├── cla.yml │ ├── code-review.yml │ ├── format.yml │ ├── publish-docs.yaml │ ├── run-individual-script-tests.yml │ ├── test-amd-mlperf-inference-implementations.yml │ ├── test-cm-based-submission-generation.yml │ ├── test-cm-script-features.yml │ ├── test-cm-tutorial-retinanet.yml │ ├── test-cm-tutorial-tvm-pip.yml │ ├── test-cm-tutorial-tvm.yml │ ├── test-cm4mlops-wheel-macos.yml │ ├── test-cm4mlops-wheel-ubuntu.yml │ ├── test-cm4mlops-wheel-windows.yml │ ├── test-image-classification-onnx.yml │ ├── test-intel-mlperf-inference-implementations.yml │ ├── test-mlperf-inference-abtf-poc.yml │ ├── test-mlperf-inference-bert-deepsparse-tf-onnxruntime-pytorch.yml │ ├── test-mlperf-inference-dlrm.yml │ ├── test-mlperf-inference-gptj.yml │ ├── test-mlperf-inference-llama2.yml │ ├── test-mlperf-inference-mixtral.yml │ ├── test-mlperf-inference-mlcommons-cpp-resnet50.yml │ ├── test-mlperf-inference-resnet50.yml │ ├── test-mlperf-inference-retinanet.yml │ ├── test-mlperf-inference-rnnt.yml │ ├── test-mlperf-inference-sdxl.yaml │ ├── test-mlperf-inference-tvm.yml │ ├── test-mlperf-loadgen-onnx-huggingface-bert-fp32-squad.yml │ ├── test-nvidia-mlperf-inference-implementations.yml │ ├── test-qaic-compute-sdk-build.yml │ ├── test-qaic-software-kit.yml │ └── test-scc24-sdxl.yaml ├── .gitignore ├── CHANGES.md ├── CONTRIBUTING.md ├── COPYRIGHT.md ├── HISTORY.md ├── LICENSE.md ├── LICENSE.third-party.md ├── MANIFEST.in ├── README.md ├── VERSION ├── automation ├── COPYRIGHT.md ├── cache │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.json │ ├── module.py │ └── module_misc.py ├── cfg │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.json │ └── module.py ├── challenge │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.json │ └── module.py ├── contributor │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.json │ └── module.py ├── data │ ├── COPYRIGHT.md │ ├── _cm.json │ └── module.py ├── docker │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.json │ └── module.py ├── docs │ ├── COPYRIGHT.md │ ├── _cm.json │ └── module.py ├── experiment │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.json │ ├── module.py │ └── tests │ │ ├── test2.bat │ │ ├── test2.sh │ │ ├── test3.bat │ │ ├── test3.sh │ │ ├── test3_input.yaml │ │ ├── test__json.bat │ │ ├── test__json.sh │ │ ├── test__yaml.bat │ │ ├── test__yaml.sh │ │ ├── test_input.json │ │ ├── test_input.yaml │ │ ├── test_run.bat │ │ └── test_run.sh ├── project │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.json │ └── module.py ├── report │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.json │ └── module.py ├── script │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README-specs.md │ ├── README.md │ ├── _cm.json │ ├── assets │ │ └── scripts-workflow.png │ ├── docker_repro_example │ │ ├── ubuntu-23.04.Dockerfile │ │ ├── ubuntu-23.04.Dockerfile.build.bat │ │ ├── ubuntu-23.04.Dockerfile.build.sh │ │ ├── ubuntu-23.04.Dockerfile.run.bat │ │ └── ubuntu-23.04.Dockerfile.run.sh │ ├── module.py │ ├── module_help.py │ ├── module_misc.py │ ├── template-ae-python │ │ ├── README-extra.md │ │ ├── _cm.yaml │ │ ├── analyze.bat │ │ ├── analyze.sh │ │ ├── customize.py │ │ ├── install_deps.bat │ │ ├── install_deps.sh │ │ ├── main.py │ │ ├── plot.bat │ │ ├── plot.sh │ │ ├── reproduce.bat │ │ ├── reproduce.sh │ │ ├── run.bat │ │ ├── run.sh │ │ ├── validate.bat │ │ └── validate.sh │ ├── template-python │ │ ├── README-extra.md │ │ ├── _cm.yaml │ │ ├── customize.py │ │ ├── 
main.py │ │ ├── requirements.txt │ │ ├── run.bat │ │ └── run.sh │ ├── template-pytorch │ │ ├── README-extra.md │ │ ├── _cm.yaml │ │ ├── customize.py │ │ ├── main.py │ │ ├── requirements.txt │ │ ├── run.bat │ │ └── run.sh │ ├── template │ │ ├── README-extra.md │ │ ├── customize.py │ │ ├── run.bat │ │ └── run.sh │ └── template_list_of_scripts.md └── utils │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.json │ ├── module.py │ └── module_cfg.py ├── cfg ├── benchmark-hardware-compute │ ├── _cm.json │ ├── amd-cpu-x64.json │ ├── amd-gpu.json │ ├── generic-cpu-arm64.json │ ├── google-tpu.json │ ├── habana-gaudi.json │ ├── intel-cpu-x64.json │ ├── nvidia-gpu-jetson-orin.yaml │ ├── nvidia-gpu.json │ ├── qualcomm-ai100.json │ └── stm-32L4R5ZIT6U-NUCLEO-L4R5ZI.yaml ├── benchmark-list │ ├── _cm.json │ ├── loadgen-cpp.yaml │ ├── loadgen-python.yaml │ ├── mlperf-abtf.yaml │ ├── mlperf-inference.yaml │ ├── mlperf-mobile.yaml │ ├── mlperf-tiny.yaml │ └── mlperf-training.yaml ├── benchmark-run-mlperf-inference-v3.1 │ ├── _cm.yaml │ ├── run-005147815bf840b8-input.json │ ├── run-005147815bf840b8-meta.json │ ├── run-005147815bf840b8-output.json │ ├── run-0eeb9799b12b488f-input.json │ ├── run-0eeb9799b12b488f-meta.json │ ├── run-0eeb9799b12b488f-output.json │ ├── run-52c1d43172664ed0-input.json │ ├── run-52c1d43172664ed0-meta.json │ ├── run-52c1d43172664ed0-output.json │ ├── run-66cce585ff0242bc-input.json │ ├── run-66cce585ff0242bc-meta.json │ ├── run-6a07cf881dee462a-input.json │ ├── run-6a07cf881dee462a-meta.json │ ├── run-7d80f464b2274742-input.json │ ├── run-7d80f464b2274742-meta.json │ ├── run-7d80f464b2274742-output.json │ ├── run-7f094c244ebb4985-input.json │ ├── run-7f094c244ebb4985-meta.json │ ├── run-7f094c244ebb4985-output.json │ ├── run-7f094c244ebb4985.md │ ├── run-d5b6b5af6d794045-input.json │ ├── run-d5b6b5af6d794045-meta.json │ ├── run-d8c0f02f52bf49ae-input.json │ ├── run-d8c0f02f52bf49ae-meta.json │ ├── run-d8c0f02f52bf49ae-output.json │ ├── run-df843c22cbf54aaf-input.json │ ├── run-df843c22cbf54aaf-meta.json │ ├── run-df843c22cbf54aaf-output.json │ ├── run-df843c22cbf54aaf.md │ ├── run-f05147815bf840b8-input.json │ ├── run-f05147815bf840b8-meta.json │ └── run-f05147815bf840b8-output.json ├── benchmark-run-mlperf-inference-v4.0 │ └── _cm.yaml ├── benchmark-run-mlperf-inference-v4.1 │ └── _cm.yaml └── docker-basic-configurations │ ├── _cm.yaml │ ├── basic-archlinux.yaml │ ├── basic-rhel-9.yaml │ ├── basic-ubuntu-20.04.yaml │ ├── basic-ubuntu-22.04.yaml │ ├── basic-ubuntu-23.04.yaml │ ├── basic-ubuntu-24.04.yaml │ ├── nvidia-ubuntu-20.04-cuda-11.8-cudnn-8.6.0-pytorch-1.13.0.yaml │ ├── nvidia-ubuntu-22.04-cuda-12.1-cudnn-8.9.1-pytorch-2.0.0.yaml │ ├── nvidia-ubuntu-22.04-cuda-12.4-cudnn-9.0.0-pytorch-2.3.0.yaml │ ├── nvidia-ubuntu-22.04-cuda-12.5-cudnn-9.1.0-pytorch-2.4.0.yaml │ └── nvidia-ubuntu-22.04-cuda-12.6-cudnn-9.3.0-pytorch-2.5.0.yaml ├── challenge ├── add-derived-metrics-to-mlperf-inference │ ├── README.md │ └── _cm.json ├── automate-mlperf-inference-v3.1-and-v4.0-2024 │ ├── README.md │ └── _cm.yaml ├── compose-high-performance-and-cost-efficient-ai-systems-based-on-mlperf-4.0-2024 │ ├── README.md │ └── _cm.yaml ├── connect-mlperf-inference-v3.1-with-openbenchmarking │ ├── README.md │ └── _cm.json ├── connect-mlperf-with-medperf │ ├── README.md │ └── _cm.json ├── optimize-mlperf-inference-scc2023 │ ├── README.md │ └── _cm.json ├── optimize-mlperf-inference-scc2024 │ ├── README.md │ └── _cm.json ├── optimize-mlperf-inference-v2.1-2022 │ ├── README.md │ └── _cm.json ├── 
optimize-mlperf-inference-v3.0-2023 │ ├── README.md │ ├── _cm.json │ └── docs │ │ ├── crowd-benchmark-mlperf-bert-inference-cuda.md │ │ ├── generate-3d-unet-submission.md │ │ ├── generate-bert-submission.md │ │ ├── generate-resnet50-submission.md │ │ ├── generate-retinanet-submission.md │ │ ├── generate-rnnt-submission.md │ │ ├── setup-aws-instance.md │ │ ├── setup-gcp-instance.md │ │ └── setup-nvidia-jetson-orin.md ├── optimize-mlperf-inference-v3.1-2023 │ ├── README.md │ ├── _cm.json │ └── docs │ │ ├── generate-3d-unet-submission.md │ │ ├── generate-bert-submission.md │ │ ├── generate-resnet50-submission.md │ │ ├── generate-retinanet-submission.md │ │ ├── generate-rnnt-submission.md │ │ ├── setup-aws-instance.md │ │ ├── setup-gcp-instance.md │ │ └── setup-nvidia-jetson-orin.md ├── optimize-mlperf-inference-v3.1-amazon-inferentia-2023 │ ├── README.md │ └── _cm.json ├── optimize-mlperf-inference-v3.1-create-end-to-end-app │ ├── README.md │ └── _cm.json ├── optimize-mlperf-inference-v3.1-deepsparse │ ├── README.md │ └── _cm.json ├── optimize-mlperf-inference-v3.1-google-tpu-2023 │ ├── README.md │ └── _cm.json ├── optimize-mlperf-inference-v3.1-hugging-face-models-2023 │ ├── README.md │ └── _cm.json ├── optimize-mlperf-inference-v3.1-intel-2023 │ ├── README.md │ └── _cm.json ├── optimize-mlperf-inference-v3.1-modular-mojo-2023 │ ├── README.md │ └── _cm.json ├── optimize-mlperf-inference-v3.1-qualcomm-ai100-2023 │ ├── README.md │ └── _cm.json ├── optimize-mlperf-inference-v3.1-tvm-2023 │ ├── README.md │ └── _cm.json ├── optimize-mlperf-inference-v3.1-universal-cpp-implementation-2023 │ ├── README.md │ └── _cm.json ├── optimize-mlperf-inference-v3.1-windows-2023 │ ├── README.md │ └── _cm.json ├── repro-mlperf-inf-v3.0-orin │ ├── README.md │ └── _cm.json ├── repro-mlperf-inference-retinanet-scc2022 │ ├── README.md │ └── _cm.json ├── repro-mlperf-inference-v4.0-2024 │ ├── README.md │ └── _cm.yaml ├── repro-mlperf-inference-v4.1-2024 │ ├── README.md │ └── _cm.yaml ├── reproduce-and-automate-tinymlperf-v1.1-2023 │ ├── README.md │ └── _cm.json ├── reproduce-mlperf-training-v3.0-2023 │ ├── README.md │ └── _cm.json ├── run-mlperf@home-v3.1-cpu │ ├── README.md │ ├── _cm.json │ ├── run-cpu-bert-99-deepsparse.md │ └── run-cpu-dse-mobilenets-efficientnets-tflite.md ├── run-mlperf@home-v3.1-gpu │ ├── README.md │ ├── _cm.json │ ├── run-nvidia-gpu-bert-99-nvidia-docker-tensorrt.md │ └── run-nvidia-gpu-gpt-j-6b-ref-pytorch.md └── train-llm-for-cm-mlperf-2023 │ ├── README.md │ └── _cm.json ├── cmr.yaml ├── debug.py ├── docs ├── cm-yaml-guide.md ├── getting-started.md ├── img │ ├── logo_v2.svg │ └── pages (80).png ├── index.md ├── requirements.txt └── scripts │ ├── AI-ML-datasets │ ├── get-croissant │ │ └── index.md │ ├── get-dataset-cifar10 │ │ └── index.md │ ├── get-dataset-cnndm │ │ └── index.md │ ├── get-dataset-coco │ │ └── index.md │ ├── get-dataset-coco2014 │ │ └── index.md │ ├── get-dataset-criteo │ │ └── index.md │ ├── get-dataset-imagenet-aux │ │ └── index.md │ ├── get-dataset-imagenet-calibration │ │ └── index.md │ ├── get-dataset-imagenet-helper │ │ └── index.md │ ├── get-dataset-imagenet-train │ │ └── index.md │ ├── get-dataset-imagenet-val │ │ └── index.md │ ├── get-dataset-kits19 │ │ └── index.md │ ├── get-dataset-librispeech │ │ └── index.md │ ├── get-dataset-openimages-annotations │ │ └── index.md │ ├── get-dataset-openimages-calibration │ │ └── index.md │ ├── get-dataset-openimages │ │ └── index.md │ ├── get-dataset-openorca │ │ └── index.md │ ├── get-dataset-squad-vocab │ │ └── index.md │ 
├── get-dataset-squad │ │ └── index.md │ ├── get-preprocessed-dataset-criteo │ │ └── index.md │ ├── get-preprocessed-dataset-generic │ │ └── index.md │ ├── get-preprocessed-dataset-imagenet │ │ └── index.md │ ├── get-preprocessed-dataset-kits19 │ │ └── index.md │ ├── get-preprocessed-dataset-librispeech │ │ └── index.md │ ├── get-preprocessed-dataset-openimages │ │ └── index.md │ ├── get-preprocessed-dataset-openorca │ │ └── index.md │ ├── get-preprocessed-dataset-squad │ │ └── index.md │ └── index.md │ ├── AI-ML-frameworks │ ├── get-google-saxml │ │ └── index.md │ ├── get-onnxruntime-prebuilt │ │ └── index.md │ ├── get-qaic-apps-sdk │ │ └── index.md │ ├── get-qaic-platform-sdk │ │ └── index.md │ ├── get-qaic-software-kit │ │ └── index.md │ ├── get-rocm │ │ └── index.md │ ├── get-tvm │ │ └── index.md │ ├── index.md │ ├── install-qaic-compute-sdk-from-src │ │ └── index.md │ ├── install-rocm │ │ └── index.md │ ├── install-tensorflow-for-c │ │ └── index.md │ ├── install-tensorflow-from-src │ │ └── index.md │ └── install-tflite-from-src │ │ └── index.md │ ├── AI-ML-models │ ├── convert-ml-model-huggingface-to-onnx │ │ └── index.md │ ├── get-bert-squad-vocab │ │ └── index.md │ ├── get-dlrm │ │ └── index.md │ ├── get-ml-model-3d-unet-kits19 │ │ └── index.md │ ├── get-ml-model-bert-base-squad │ │ └── index.md │ ├── get-ml-model-bert-large-squad │ │ └── index.md │ ├── get-ml-model-dlrm-terabyte │ │ └── index.md │ ├── get-ml-model-efficientnet-lite │ │ └── index.md │ ├── get-ml-model-gptj │ │ └── index.md │ ├── get-ml-model-huggingface-zoo │ │ └── index.md │ ├── get-ml-model-llama2 │ │ └── index.md │ ├── get-ml-model-mobilenet │ │ └── index.md │ ├── get-ml-model-neuralmagic-zoo │ │ └── index.md │ ├── get-ml-model-resnet50 │ │ └── index.md │ ├── get-ml-model-retinanet-nvidia │ │ └── index.md │ ├── get-ml-model-retinanet │ │ └── index.md │ ├── get-ml-model-rnnt │ │ └── index.md │ ├── get-ml-model-stable-diffusion │ │ └── index.md │ ├── get-ml-model-tiny-resnet │ │ └── index.md │ ├── get-ml-model-using-imagenet-from-model-zoo │ │ └── index.md │ ├── get-tvm-model │ │ └── index.md │ └── index.md │ ├── AI-ML-optimization │ ├── calibrate-model-for.qaic │ │ └── index.md │ ├── compile-model-for.qaic │ │ └── index.md │ ├── index.md │ └── prune-bert-models │ │ └── index.md │ ├── CM-Interface │ ├── get-cache-dir │ │ └── index.md │ └── index.md │ ├── CM-automation │ ├── create-custom-cache-entry │ │ └── index.md │ └── index.md │ ├── CM-interface-prototyping │ ├── index.md │ ├── test-debug │ │ └── index.md │ └── test-mlperf-inference-retinanet │ │ └── index.md │ ├── CUDA-automation │ ├── get-cuda-devices │ │ └── index.md │ ├── get-cuda │ │ └── index.md │ ├── get-cudnn │ │ └── index.md │ ├── get-tensorrt │ │ └── index.md │ ├── index.md │ ├── install-cuda-package-manager │ │ └── index.md │ └── install-cuda-prebuilt │ │ └── index.md │ ├── Cloud-automation │ ├── destroy-terraform │ │ └── index.md │ ├── get-aws-cli │ │ └── index.md │ ├── get-terraform │ │ └── index.md │ ├── index.md │ ├── install-aws-cli │ │ └── index.md │ ├── install-terraform-from-src │ │ └── index.md │ └── run-terraform │ │ └── index.md │ ├── Collective-benchmarking │ ├── index.md │ └── launch-benchmark │ │ └── index.md │ ├── Compiler-automation │ ├── get-aocl │ │ └── index.md │ ├── get-cl │ │ └── index.md │ ├── get-compiler-flags │ │ └── index.md │ ├── get-compiler-rust │ │ └── index.md │ ├── get-gcc │ │ └── index.md │ ├── get-go │ │ └── index.md │ ├── get-llvm │ │ └── index.md │ ├── index.md │ ├── install-gcc-src │ │ └── index.md │ ├── 
install-ipex-from-src │ │ └── index.md │ ├── install-llvm-prebuilt │ │ └── index.md │ ├── install-llvm-src │ │ └── index.md │ ├── install-onednn-from-src │ │ └── index.md │ ├── install-onnxruntime-from-src │ │ └── index.md │ ├── install-pytorch-from-src │ │ └── index.md │ ├── install-pytorch-kineto-from-src │ │ └── index.md │ ├── install-torchvision-from-src │ │ └── index.md │ ├── install-tpp-pytorch-extension │ │ └── index.md │ └── install-transformers-from-src │ │ └── index.md │ ├── Dashboard-automation │ ├── index.md │ └── publish-results-to-dashboard │ │ └── index.md │ ├── Detection-or-installation-of-tools-and-artifacts │ ├── get-android-sdk │ │ └── index.md │ ├── get-aria2 │ │ └── index.md │ ├── get-bazel │ │ └── index.md │ ├── get-blis │ │ └── index.md │ ├── get-brew │ │ └── index.md │ ├── get-cmake │ │ └── index.md │ ├── get-cmsis_5 │ │ └── index.md │ ├── get-docker │ │ └── index.md │ ├── get-generic-sys-util │ │ └── index.md │ ├── get-google-test │ │ └── index.md │ ├── get-java │ │ └── index.md │ ├── get-javac │ │ └── index.md │ ├── get-lib-armnn │ │ └── index.md │ ├── get-lib-dnnl │ │ └── index.md │ ├── get-lib-protobuf │ │ └── index.md │ ├── get-lib-qaic-api │ │ └── index.md │ ├── get-nvidia-docker │ │ └── index.md │ ├── get-openssl │ │ └── index.md │ ├── get-rclone │ │ └── index.md │ ├── get-sys-utils-cm │ │ └── index.md │ ├── get-sys-utils-min │ │ └── index.md │ ├── get-xilinx-sdk │ │ └── index.md │ ├── get-zendnn │ │ └── index.md │ ├── index.md │ ├── install-bazel │ │ └── index.md │ ├── install-cmake-prebuilt │ │ └── index.md │ ├── install-gflags │ │ └── index.md │ ├── install-github-cli │ │ └── index.md │ ├── install-intel-neural-speed-from-src │ │ └── index.md │ ├── install-numactl-from-src │ │ └── index.md │ └── install-openssl │ │ └── index.md │ ├── DevOps-automation │ ├── benchmark-program │ │ └── index.md │ ├── compile-program │ │ └── index.md │ ├── convert-csv-to-md │ │ └── index.md │ ├── copy-to-clipboard │ │ └── index.md │ ├── create-conda-env │ │ └── index.md │ ├── create-patch │ │ └── index.md │ ├── detect-sudo │ │ └── index.md │ ├── download-and-extract │ │ └── index.md │ ├── download-file │ │ └── index.md │ ├── download-torrent │ │ └── index.md │ ├── extract-file │ │ └── index.md │ ├── fail │ │ └── index.md │ ├── get-conda │ │ └── index.md │ ├── get-git-repo │ │ └── index.md │ ├── get-github-cli │ │ └── index.md │ ├── index.md │ ├── pull-git-repo │ │ └── index.md │ ├── push-csv-to-spreadsheet │ │ └── index.md │ ├── set-device-settings-qaic │ │ └── index.md │ ├── set-echo-off-win │ │ └── index.md │ ├── set-performance-mode │ │ └── index.md │ ├── set-sqlite-dir │ │ └── index.md │ └── tar-my-folder │ │ └── index.md │ ├── Docker-automation │ ├── build-docker-image │ │ └── index.md │ ├── build-dockerfile │ │ └── index.md │ ├── index.md │ ├── prune-docker │ │ └── index.md │ └── run-docker-container │ │ └── index.md │ ├── GUI │ ├── gui │ │ └── index.md │ └── index.md │ ├── Legacy-CK-support │ ├── get-ck-repo-mlops │ │ └── index.md │ ├── get-ck │ │ └── index.md │ └── index.md │ ├── MLPerf-benchmark-support │ ├── add-custom-nvidia-system │ │ └── index.md │ ├── benchmark-any-mlperf-inference-implementation │ │ └── index.md │ ├── build-mlperf-inference-server-nvidia │ │ └── index.md │ ├── generate-mlperf-inference-submission │ │ └── index.md │ ├── generate-mlperf-inference-user-conf │ │ └── index.md │ ├── generate-mlperf-tiny-report │ │ └── index.md │ ├── generate-mlperf-tiny-submission │ │ └── index.md │ ├── generate-nvidia-engine │ │ └── index.md │ ├── 
get-mlperf-inference-intel-scratch-space │ │ └── index.md │ ├── get-mlperf-inference-loadgen │ │ └── index.md │ ├── get-mlperf-inference-nvidia-common-code │ │ └── index.md │ ├── get-mlperf-inference-nvidia-scratch-space │ │ └── index.md │ ├── get-mlperf-inference-results-dir │ │ └── index.md │ ├── get-mlperf-inference-results │ │ └── index.md │ ├── get-mlperf-inference-src │ │ └── index.md │ ├── get-mlperf-inference-submission-dir │ │ └── index.md │ ├── get-mlperf-inference-sut-configs │ │ └── index.md │ ├── get-mlperf-inference-sut-description │ │ └── index.md │ ├── get-mlperf-logging │ │ └── index.md │ ├── get-mlperf-power-dev │ │ └── index.md │ ├── get-mlperf-tiny-eembc-energy-runner-src │ │ └── index.md │ ├── get-mlperf-tiny-src │ │ └── index.md │ ├── get-mlperf-training-nvidia-code │ │ └── index.md │ ├── get-mlperf-training-src │ │ └── index.md │ ├── get-nvidia-mitten │ │ └── index.md │ ├── get-spec-ptd │ │ └── index.md │ ├── import-mlperf-inference-to-experiment │ │ └── index.md │ ├── import-mlperf-tiny-to-experiment │ │ └── index.md │ ├── import-mlperf-training-to-experiment │ │ └── index.md │ ├── index.md │ ├── install-mlperf-logging-from-src │ │ └── index.md │ ├── prepare-training-data-bert │ │ └── index.md │ ├── prepare-training-data-resnet │ │ └── index.md │ ├── preprocess-mlperf-inference-submission │ │ └── index.md │ ├── process-mlperf-accuracy │ │ └── index.md │ ├── push-mlperf-inference-results-to-github │ │ └── index.md │ ├── run-mlperf-inference-mobilenet-models │ │ └── index.md │ ├── run-mlperf-inference-submission-checker │ │ └── index.md │ ├── run-mlperf-power-client │ │ └── index.md │ ├── run-mlperf-power-server │ │ └── index.md │ ├── run-mlperf-training-submission-checker │ │ └── index.md │ └── truncate-mlperf-inference-accuracy-log │ │ └── index.md │ ├── Modular-AI-ML-application-pipeline │ ├── app-image-classification-onnx-py │ │ └── index.md │ ├── app-image-classification-tf-onnx-cpp │ │ └── index.md │ ├── app-image-classification-torch-py │ │ └── index.md │ ├── app-image-classification-tvm-onnx-py │ │ └── index.md │ ├── app-stable-diffusion-onnx-py │ │ └── index.md │ └── index.md │ ├── Modular-MLPerf-benchmarks │ ├── app-mlperf-inference-dummy │ │ └── index.md │ ├── app-mlperf-inference-intel │ │ └── index.md │ ├── app-mlperf-inference-qualcomm │ │ └── index.md │ └── index.md │ ├── Modular-MLPerf-inference-benchmark-pipeline │ ├── app-loadgen-generic-python │ │ └── index.md │ ├── app-mlperf-inference-ctuning-cpp-tflite │ │ └── index.md │ ├── app-mlperf-inference-mlcommons-cpp │ │ └── index.md │ ├── app-mlperf-inference-mlcommons-python │ │ └── index.md │ ├── app-mlperf-inference │ │ └── index.md │ ├── benchmark-program-mlperf │ │ └── index.md │ ├── index.md │ └── run-mlperf-inference-app │ │ └── index.md │ ├── Modular-MLPerf-training-benchmark-pipeline │ ├── app-mlperf-training-nvidia │ │ └── index.md │ ├── app-mlperf-training-reference │ │ └── index.md │ └── index.md │ ├── Modular-application-pipeline │ ├── app-image-corner-detection │ │ └── index.md │ └── index.md │ ├── Platform-information │ ├── detect-cpu │ │ └── index.md │ ├── detect-os │ │ └── index.md │ └── index.md │ ├── Python-automation │ ├── activate-python-venv │ │ └── index.md │ ├── get-generic-python-lib │ │ └── index.md │ ├── get-python3 │ │ └── index.md │ ├── index.md │ ├── install-generic-conda-package │ │ └── index.md │ ├── install-python-src │ │ └── index.md │ └── install-python-venv │ │ └── index.md │ ├── Remote-automation │ ├── index.md │ └── remote-run-commands │ │ └── index.md │ ├── 
Reproduce-MLPerf-benchmarks │ ├── app-mlperf-inference-nvidia │ │ └── index.md │ ├── index.md │ ├── reproduce-mlperf-octoml-tinyml-results │ │ └── index.md │ ├── reproduce-mlperf-training-nvidia │ │ └── index.md │ └── wrapper-reproduce-octoml-tinyml-submission │ │ └── index.md │ ├── Reproducibility-and-artifact-evaluation │ ├── get-ipol-src │ │ └── index.md │ ├── index.md │ ├── process-ae-users │ │ └── index.md │ ├── reproduce-ipol-paper-2022-439 │ │ └── index.md │ └── reproduce-micro-paper-2023-victima │ │ └── index.md │ ├── Tests │ ├── index.md │ ├── print-any-text │ │ └── index.md │ ├── print-croissant-desc │ │ └── index.md │ ├── print-hello-world-java │ │ └── index.md │ ├── print-hello-world-javac │ │ └── index.md │ ├── print-hello-world-py │ │ └── index.md │ ├── print-hello-world │ │ └── index.md │ ├── print-python-version │ │ └── index.md │ ├── run-python │ │ └── index.md │ ├── test-cm-core │ │ └── index.md │ ├── test-cm-script-pipeline │ │ └── index.md │ ├── test-deps-conditions │ │ └── index.md │ ├── test-deps-conditions2 │ │ └── index.md │ ├── test-download-and-extract-artifacts │ │ └── index.md │ ├── test-set-sys-user-cm │ │ └── index.md │ └── upgrade-python-pip │ │ └── index.md │ ├── TinyML-automation │ ├── create-fpgaconvnet-app-tinyml │ │ └── index.md │ ├── create-fpgaconvnet-config-tinyml │ │ └── index.md │ ├── flash-tinyml-binary │ │ └── index.md │ ├── get-microtvm │ │ └── index.md │ ├── get-zephyr-sdk │ │ └── index.md │ ├── get-zephyr │ │ └── index.md │ └── index.md │ └── index.md ├── get_git_version.py ├── git_commit_hash.txt ├── mkdocs.yml ├── pyproject.toml ├── report ├── mlperf-inference-v3.1-analysis-ctuning │ ├── README.md │ └── _cm.json ├── mlperf-inference-v3.1-press-release-ctuning │ └── _cm.json ├── mlperf-inference-v3.1-press-release-hpcwire │ └── _cm.json └── mlperf-inference-v4.0-press-release-ctuning │ └── _cm.json ├── requirements.txt ├── script ├── COPYRIGHT.md ├── README.md ├── activate-python-venv │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── add-custom-nvidia-system │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── app-image-classification-onnx-py │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── img │ │ └── computer_mouse.jpg │ ├── requirements.txt │ ├── run.bat │ ├── run.sh │ ├── src │ │ └── onnx_classify.py │ └── tests │ │ └── README.md ├── app-image-classification-tf-onnx-cpp │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── include │ │ └── benchmark.h │ ├── run.sh │ └── src │ │ └── classification.cpp ├── app-image-classification-torch-py │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── img │ │ └── computer_mouse.jpg │ ├── requirements.txt │ ├── run.bat │ ├── run.sh │ └── src │ │ └── pytorch_classify_preprocessed.py ├── app-image-classification-tvm-onnx-py │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── img │ │ └── computer_mouse.jpg │ ├── requirements.txt │ ├── run.sh │ └── src │ │ └── classify.py ├── app-image-corner-detection │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.sh │ └── susan.c ├── app-loadgen-generic-python │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ ├── run.sh │ ├── src │ │ ├── backend_onnxruntime.py │ │ ├── backend_pytorch.py │ │ ├── loadgen │ │ │ ├── 
harness.py │ │ │ ├── model.py │ │ │ └── runners.py │ │ ├── main.py │ │ └── utils.py │ └── tests │ │ └── modular-cm-containers │ │ ├── _common.bat │ │ ├── _common.sh │ │ ├── build.bat │ │ ├── build.sh │ │ ├── loadgen-generic-python--ubuntu-cpu.Dockerfile │ │ ├── loadgen-generic-python-auto.Dockerfile │ │ ├── run.bat │ │ └── run.sh ├── app-mlperf-inference-amd │ ├── COPYRIGHT.md │ ├── _cm.yaml │ ├── customize.py │ ├── run-llama2.sh │ └── run.sh ├── app-mlperf-inference-ctuning-cpp-tflite │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── armnn │ │ └── classification.cpp │ ├── customize.py │ ├── inc │ │ └── benchmark.h │ └── src │ │ └── classification.cpp ├── app-mlperf-inference-dummy │ ├── COPYRIGHT.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── app-mlperf-inference-intel │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── build_bert_harness.sh │ ├── build_gptj_harness.sh │ ├── build_resnet50_harness.sh │ ├── build_retinanet_harness.sh │ ├── build_sdxl_harness.sh │ ├── calibrate_dlrm_v2_model.sh │ ├── calibrate_gptj_int4_model.sh │ ├── compile_resnet50.sh │ ├── compile_retinanet.sh │ ├── customize.py │ ├── prepare_3d-unet_data_model.sh │ ├── prepare_imagenet_calibration.sh │ ├── run_3d-unet_harness.sh │ ├── run_bert_harness.sh │ ├── run_dlrm_v2_harness.sh │ ├── run_gptj_harness_v3_1.sh │ ├── run_gptj_harness_v4_0.sh │ ├── run_resnet50_harness.sh │ ├── run_retinanet_harness.sh │ └── run_sdxl_harness.sh ├── app-mlperf-inference-mlcommons-cpp │ ├── CONTRIBUTING.md │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── inc │ │ ├── backend.h │ │ ├── common.h │ │ ├── device.h │ │ ├── gpu_device.h │ │ ├── model.h │ │ ├── npy.h │ │ ├── onnxruntime_backend.h │ │ ├── sample_library.h │ │ ├── system.h │ │ └── tflite_backend.h │ ├── src │ │ └── main.cpp │ └── tests │ │ └── win.bat ├── app-mlperf-inference-mlcommons-python │ ├── COPYRIGHT.md │ ├── README-about.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── nvidia │ │ └── retinanet.py ├── app-mlperf-inference-nvidia │ ├── COPYRIGHT.md │ ├── README-about.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── app-mlperf-inference-qualcomm │ ├── COPYRIGHT.md │ ├── README.md │ ├── README_aws_dl2q.24xlarge.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── app-mlperf-inference-redhat │ ├── COPYRIGHT.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── app-mlperf-inference │ ├── COPYRIGHT.md │ ├── README-about.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── build_dockerfiles.py │ ├── customize.py │ ├── run.sh │ ├── run_config.yml │ └── verify_accuracy.sh ├── app-mlperf-training-nvidia │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run-bert-training.sh │ └── run.sh ├── app-mlperf-training-reference │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run-bert-training.sh │ └── run.sh ├── app-stable-diffusion-onnx-py │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── process.py │ ├── run.bat │ └── run.sh ├── authenticate-github-cli │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── benchmark-any-mlperf-inference-implementation │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run-template.sh ├── benchmark-program-mlperf │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── benchmark-program │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── 
customize.py │ ├── run-ubuntu.sh │ ├── run.bat │ └── run.sh ├── build-docker-image │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── examples │ │ ├── 0-common.bat │ │ ├── 0-generate.bat │ │ ├── 1-build.bat │ │ ├── 2-run-cm-command1.bat │ │ ├── 2-run-cm-command2.bat │ │ ├── 2-run-cm-command3.bat │ │ ├── 2-run-cm-command4.bat │ │ ├── 2-run-cm-command5.bat │ │ ├── 2-run-interactive1.bat │ │ ├── 2-run-interactive2.bat │ │ ├── 3-push-to-docker-hub.bat │ │ ├── Dockerfile.cm-base-ubuntu-22.04-20230804 │ │ ├── Dockerfile.cm-base-ubuntu-23.04-20230804 │ │ ├── Dockerfile.cm-base-ubuntu-23.04-latest │ │ ├── Dockerfile.cm-script-app-image-classification-onnx-py-ubuntu-23.04-latest │ │ ├── README.md │ │ ├── computer_mouse.jpg │ │ ├── extra-cmd.cm-script-app-image-classification-onnx-py │ │ ├── run-cm-image-classification-python-onnx-with-file.bat │ │ ├── run-cm-image-classification-python-onnx-with-file.sh │ │ ├── run-cm-image-classification-python-onnx.bat │ │ └── run-cm-image-classification-python-onnx.sh │ ├── run.bat │ └── run.sh ├── build-dockerfile │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── dockerinfo.json ├── build-mlperf-inference-server-nvidia │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── calibrate-model-for.qaic │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── clean-nvidia-mlperf-inference-scratch-space │ ├── COPYRIGHT.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── compile-model-for.qaic │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── compile-program │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── convert-csv-to-md │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── process.py │ ├── run.bat │ └── run.sh ├── convert-ml-model-huggingface-to-onnx │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── copy-to-clipboard │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── code.py │ ├── run.bat │ └── run.sh ├── create-conda-env │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── create-custom-cache-entry │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── create-fpgaconvnet-app-tinyml │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── create-fpgaconvnet-config-tinyml │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── create-patch │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── destroy-terraform │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── detect-cpu │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── detect-os │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ ├── run.sh │ └── run_config.yml ├── detect-sudo │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── download-and-extract │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── tests │ │ ├── download-and-extract-file.bat │ │ └── download-and-extract-file2.bat ├── download-file │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── 
_cm.yaml │ ├── customize.py │ ├── run.bat │ ├── run.sh │ └── tests │ │ ├── download-file.bat │ │ └── download-file2.bat ├── download-torrent │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── draw-graph-from-json-data │ ├── COPYRIGHT.md │ ├── _cm.yaml │ ├── customize.py │ ├── process-cm-deps.py │ ├── run.bat │ └── run.sh ├── dump-pip-freeze │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── dump.py │ ├── run.bat │ └── run.sh ├── extract-file │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── fail │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── flash-tinyml-binary │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── generate-docs-for-all-scripts.cmd ├── generate-mlperf-inference-submission │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── default_files │ │ ├── analyzer_table.md │ │ └── power_settings.md │ └── sample-cm-sut-info.json ├── generate-mlperf-inference-user-conf │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── generate-mlperf-tiny-report │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run_submission_checker.bat │ └── run_submission_checker.sh ├── generate-mlperf-tiny-submission │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── generate-nvidia-engine │ ├── COPYRIGHT.md │ ├── README-about.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-android-sdk │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── prepare-sdk-manager.bat │ └── prepare-sdk-manager.sh ├── get-aocl │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-aria2 │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── install.bat │ ├── install.sh │ ├── run.bat │ └── run.sh ├── get-aws-cli │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-bazel │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── get-blis │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── get-brew │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── run.sh ├── get-cache-dir │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── get-ck-repo-mlops │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── run.bat │ └── run.sh ├── get-ck │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── run.bat │ └── run.sh ├── get-cl │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.bat ├── get-cmake │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── get-cmsis_5 │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-compiler-flags │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── get-compiler-rust │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-conda │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── install.bat │ ├── install.sh │ ├── run.bat │ └── run.sh 
├── get-croissant │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── get-cuda-devices │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── detect.py │ ├── detect.sh │ ├── print_cuda_devices.cu │ ├── run.bat │ └── run.sh ├── get-cuda │ ├── COPYRIGHT.md │ ├── README-about.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── get-cudnn │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-dataset-cifar10 │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── requirements.txt │ ├── run.bat │ └── run.sh ├── get-dataset-cnndm │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run-intel.sh │ └── run.sh ├── get-dataset-coco │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── get-dataset-coco2014 │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── get-dataset-criteo │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ └── run.sh ├── get-dataset-imagenet-aux │ ├── COPYRIGHT.md │ ├── README.md │ └── _cm.yaml ├── get-dataset-imagenet-calibration │ ├── COPYRIGHT.md │ ├── README.md │ └── _cm.yaml ├── get-dataset-imagenet-helper │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── imagenet_helper │ │ └── __init__.py ├── get-dataset-imagenet-train │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-dataset-imagenet-val │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.bat ├── get-dataset-kits19 │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-dataset-librispeech │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-dataset-mlperf-inference-igbh │ ├── COPYRIGHT.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-dataset-mlperf-inference-mixtral │ ├── COPYRIGHT.md │ ├── _cm.yaml │ ├── customize.py │ ├── generate-test-dataset.py │ └── run.sh ├── get-dataset-openimages-annotations │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── get-dataset-openimages-calibration │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── filter.py │ └── run-filter.sh ├── get-dataset-openimages │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── get-dataset-openorca │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── get-dataset-squad-vocab │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── get-dataset-squad │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── get-dlrm-data-mlperf-inference │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── checksums.txt │ ├── customize.py │ └── run.sh ├── get-dlrm │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-docker │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── install-centos.sh │ ├── install-ubuntu.sh │ ├── install.bat │ ├── install.sh │ ├── run.bat │ └── run.sh ├── get-gcc │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── get-generic-python-lib │ ├── COPYRIGHT.md │ ├── README-extra.md │ 
├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── detect-version.py │ ├── install.bat │ ├── install.sh │ ├── run.bat │ ├── run.sh │ ├── tensorflow │ │ ├── run-aarch64.sh │ │ └── run-macos.sh │ ├── uninstall_deps.sh │ ├── validate_cache.bat │ └── validate_cache.sh ├── get-generic-sys-util │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── _cm.yaml │ ├── customize.py │ ├── detect.sh │ ├── install-with-retry.sh │ └── install.sh ├── get-gh-actions-runner │ ├── COPYRIGHT.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-git-repo │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── get-github-cli │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── get-go │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-google-saxml │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── get-google-test │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-ipol-src │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── patch │ │ └── 20240127.patch ├── get-java │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── install-prebuilt.bat │ ├── install-prebuilt.sh │ ├── run.bat │ └── run.sh ├── get-javac │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── install-prebuilt.bat │ ├── install-prebuilt.sh │ ├── run.bat │ └── run.sh ├── get-lib-armnn │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-lib-dnnl │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-lib-protobuf │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-lib-qaic-api │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── master │ │ ├── QAicInfApi.cpp │ │ └── QAicInfApi.h │ └── run.sh ├── get-llvm │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── get-microtvm │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-ml-model-3d-unet-kits19 │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── get-ml-model-bert-base-squad │ ├── COPYRIGHT.md │ ├── README.md │ └── _cm.yaml ├── get-ml-model-bert-large-squad │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run-packed.sh ├── get-ml-model-dlrm-terabyte │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── run.sh ├── get-ml-model-efficientnet-lite │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── get-ml-model-gptj │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── convert_gptj_ckpt.py │ ├── customize.py │ ├── run-int4-calibration.sh │ ├── run-intel.sh │ ├── run-nvidia.sh │ ├── run-saxml-quantized.sh │ └── run-saxml.sh ├── get-ml-model-huggingface-zoo │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── download_model.py │ ├── run.bat │ └── run.sh ├── get-ml-model-llama2 │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run-amd.sh │ └── run-nvidia.sh ├── get-ml-model-mixtral │ ├── COPYRIGHT.md │ ├── _cm.yaml │ └── customize.py ├── get-ml-model-mobilenet │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ 
└── customize.py ├── get-ml-model-neuralmagic-zoo │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── download_sparse.py │ ├── run.bat │ └── run.sh ├── get-ml-model-resnet50 │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run-fix-input.sh │ └── run_config.yml ├── get-ml-model-retinanet-nvidia │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── nvidia_patch_retinanet_efficientnms.py │ ├── polygraphy_script.sh │ └── run.sh ├── get-ml-model-retinanet │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── node-precision-info.py │ └── run-no-nms.sh ├── get-ml-model-rgat │ ├── COPYRIGHT.md │ ├── _cm.yaml │ └── customize.py ├── get-ml-model-rnnt │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── get-ml-model-stable-diffusion │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── get-ml-model-tiny-resnet │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.sh │ └── run_config.yml ├── get-ml-model-using-imagenet-from-model-zoo │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── get-mlperf-inference-intel-scratch-space │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── get-mlperf-inference-loadgen │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ ├── run.sh │ └── tests │ │ └── download-and-install.bat ├── get-mlperf-inference-nvidia-common-code │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── get-mlperf-inference-nvidia-scratch-space │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── get-mlperf-inference-results-dir │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── get-mlperf-inference-results │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── get-mlperf-inference-src │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── patch │ │ ├── coco.patch │ │ ├── git.patch │ │ ├── openimages-pycocotools.patch │ │ ├── windows-openimages.patch │ │ └── windows-openimages2.patch ├── get-mlperf-inference-submission-dir │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── get-mlperf-inference-sut-configs │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── configs │ │ ├── IntelSPR.24c │ │ │ └── intel-implementation │ │ │ │ └── cpu-device │ │ │ │ └── pytorch-framework │ │ │ │ ├── default-config.yaml │ │ │ │ └── framework-version-default │ │ │ │ └── default-config.yaml │ │ ├── RTX4090x1 │ │ │ └── nvidia_original-implementation │ │ │ │ └── gpu-device │ │ │ │ └── tensorrt-framework │ │ │ │ ├── default-config.yaml │ │ │ │ └── framework-version-default │ │ │ │ └── default-config.yaml │ │ ├── RTX4090x2 │ │ │ └── nvidia_original-implementation │ │ │ │ └── gpu-device │ │ │ │ └── tensorrt-framework │ │ │ │ ├── default-config.yaml │ │ │ │ └── framework-version-default │ │ │ │ └── default-config.yaml │ │ └── default │ │ │ ├── config.yaml │ │ │ └── default │ │ │ └── default-config.yaml │ └── customize.py ├── get-mlperf-inference-sut-description │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── detect_memory.sh │ ├── get_memory_info.py │ └── hardware │ │ └── default.json ├── get-mlperf-inference-utils │ ├── 
COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── mlperf_utils.py ├── get-mlperf-logging │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── get-mlperf-power-dev │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── get-mlperf-tiny-eembc-energy-runner-src │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── get-mlperf-tiny-src │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── get-mlperf-training-nvidia-code │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── get-mlperf-training-src │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── patch │ │ ├── cpu_load.patch │ │ └── nvidia-retinanet.patch ├── get-nvidia-docker │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── run-ubuntu.sh ├── get-nvidia-mitten │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── get-onnxruntime-prebuilt │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── get-openssl │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-platform-details │ ├── COPYRIGHT.md │ ├── README-EXTRA.md │ ├── _cm.yaml │ ├── customize.py │ ├── run-macos.sh │ ├── run.bat │ └── run.sh ├── get-preprocessed-dataset-criteo │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── checksums.txt │ ├── customize.py │ ├── preprocess.py │ ├── preprocess_multihot.sh │ ├── run-multihot.sh │ └── run.sh ├── get-preprocessed-dataset-generic │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── src │ │ ├── generic_preprocess.py │ │ └── preprocess_object_detection_dataset.py ├── get-preprocessed-dataset-imagenet │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── preprocess.py │ ├── run.bat │ └── run.sh ├── get-preprocessed-dataset-kits19 │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-preprocessed-dataset-librispeech │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-preprocessed-dataset-openimages │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── nvidia_preprocess.py │ ├── preprocess.py │ ├── run.bat │ └── run.sh ├── get-preprocessed-dataset-openorca │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-preprocessed-dataset-squad │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run-packed.sh │ └── run.sh ├── get-python3 │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── get-qaic-apps-sdk │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── get-qaic-platform-sdk │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── get-qaic-software-kit │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-rclone-config │ ├── COPYRIGHT.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── get-rclone │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── configs │ │ └── rclone.conf │ ├── customize.py │ ├── install-system-macos.sh │ ├── install-system.sh │ ├── install.bat │ ├── install.sh │ ├── run.bat │ └── run.sh ├── get-rocm-devices 
│ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── detect.py │ ├── detect.sh │ └── run.sh ├── get-rocm │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-spec-ptd │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-sys-utils-cm │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── do_pip_installs.sh │ ├── do_pip_installs.sh.old │ ├── requirements.txt │ ├── run-arch.sh │ ├── run-debian.sh │ ├── run-macos.sh │ ├── run-rhel.sh │ ├── run-sles.sh │ └── run-ubuntu.sh ├── get-sys-utils-min │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── get-tensorrt │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-terraform │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-tvm-model │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── process.py │ └── run.sh ├── get-tvm │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-xilinx-sdk │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-zendnn │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── get-zephyr-sdk │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── get-zephyr │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run-ubuntu.sh │ └── run.sh ├── gui │ ├── COPYRIGHT.md │ ├── README-about.md │ ├── README.md │ ├── _cm.yaml │ ├── app.py │ ├── customize.py │ ├── graph.py │ ├── install │ │ ├── linux.md │ │ ├── macos.md │ │ ├── redhat.md │ │ └── windows.md │ ├── misc.py │ ├── playground.py │ ├── playground_apps.py │ ├── playground_beta.py │ ├── playground_beta_README.md │ ├── playground_challenges.py │ ├── playground_challenges_with_prizes.py │ ├── playground_contributors.py │ ├── playground_howtorun.py │ ├── playground_install.py │ ├── playground_reports.py │ ├── playground_reproduce.py │ ├── playground_scripts.py │ ├── run.bat │ ├── run.sh │ ├── script.py │ └── tests │ │ ├── README.md │ │ ├── generate_password.py │ │ ├── test.cmd │ │ ├── test2.cmd │ │ ├── test3.cmd │ │ ├── test4.cmd │ │ ├── test4a.cmd │ │ ├── test4b.cmd │ │ └── test5.cmd ├── import-experiment-to-sqlite │ └── README.md ├── import-mlperf-inference-to-experiment │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── import-mlperf-tiny-to-experiment │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── assets │ │ └── cm-visualization-and-customization-of-tinymlperf-results2.png │ └── customize.py ├── import-mlperf-training-to-experiment │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run_mlperf_logger.sh ├── install-apt-package │ ├── COPYRIGHT.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── install-aws-cli │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── install-bazel │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run-aarch64.sh │ ├── run.bat │ └── run.sh ├── install-cmake-prebuilt │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── install-cuda-package-manager │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ 
├── customize.py │ ├── run-ubuntu.sh │ └── run.sh ├── install-cuda-prebuilt │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── install-diffusers-from-src │ ├── COPYRIGHT.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── install-gcc-src │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── install-generic-conda-package │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── install-gflags-from-src │ ├── COPYRIGHT.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── install-gflags │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── install-github-cli │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run-macos.sh │ ├── run-rhel.sh │ ├── run.bat │ └── run.sh ├── install-intel-neural-speed-from-src │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── install-ipex-from-src │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── apply_intel_3d-unet_patch.sh │ ├── apply_intel_resnet50_patch.sh │ ├── apply_intel_retinanet_patch.sh │ ├── customize.py │ └── run.sh ├── install-llvm-prebuilt │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── install-llvm-src │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── install-llvm-16-intel-mlperf-inference.sh │ └── run.sh ├── install-mlperf-logging-from-src │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── install-nccl-libs │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run-ubuntu.sh │ └── run.sh ├── install-numactl-from-src │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── install-onednn-from-src │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run-intel-mlperf-inference-bert.sh │ ├── run-intel-mlperf-inference.sh │ └── run.sh ├── install-onnxruntime-from-src │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── install-opencv-from-src │ ├── COPYRIGHT.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── install-openssl │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── install-pip-package-for-cmind-python │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── install-python-src │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── install-python-venv │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── install-pytorch-from-src │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run-intel-mlperf-inference-v3_1.sh │ ├── run-intel-mlperf-inference-vision.sh │ └── run.sh ├── install-pytorch-kineto-from-src │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── install-qaic-compute-sdk-from-src │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── install-rapidjson-from-src │ ├── COPYRIGHT.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── install-rocm │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run-rhel.sh │ ├── run-ubuntu.sh │ └── run.sh ├── install-tensorflow-for-c │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── install-tensorflow-from-src │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── 
run.sh ├── install-terraform-from-src │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── install-tflite-from-src │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── install-torchvision-from-src │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── install-tpp-pytorch-extension │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── install-transformers-from-src │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── launch-benchmark │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── tests │ │ └── debug.py ├── plug-prebuilt-cudnn-to-cuda │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── plug-prebuilt-cusparselt-to-cuda │ ├── COPYRIGHT.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── prepare-training-data-bert │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run-nvidia.sh │ ├── run-reference.sh │ ├── run.sh │ └── run_config.yml ├── prepare-training-data-resnet │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run-nvidia.sh │ ├── run-reference.sh │ └── run_config.yml ├── preprocess-mlperf-inference-submission │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── print-any-text │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── print-croissant-desc │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── code.py │ ├── run.bat │ └── run.sh ├── print-hello-world-java │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── code.java │ ├── run.bat │ └── run.sh ├── print-hello-world-javac │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── code.java │ ├── run.bat │ └── run.sh ├── print-hello-world-py │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── app.py │ ├── customize.py │ ├── run.bat │ └── run.sh ├── print-hello-world │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── run.bat │ └── run.sh ├── print-python-version │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── run.bat │ └── run.sh ├── process-ae-users │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── code.py │ ├── customize.py │ ├── run.bat │ └── run.sh ├── process-mlperf-accuracy │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── prune-bert-models │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── prune-docker │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── run.bat │ └── run.sh ├── publish-results-to-dashboard │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── code.py │ ├── run.bat │ └── run.sh ├── pull-git-repo │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── push-csv-to-spreadsheet │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── google_api.py │ └── run.sh ├── push-mlperf-inference-results-to-github │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── remote-run-commands │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── reproduce-ieee-acm-micro2023-paper-22 │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── install_deps.sh │ ├── install_deps_cuda.sh │ ├── install_deps_driver.sh │ ├── 
install_deps_pytorch.sh │ ├── install_deps_transformers.sh │ ├── run.sh │ ├── run_figure11.sh │ ├── run_figure12.sh │ └── run_figure13.sh ├── reproduce-ieee-acm-micro2023-paper-28 │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── install_deps.sh │ ├── install_spec_deps.sh │ ├── plot.sh │ ├── run.sh │ └── run_spec.sh ├── reproduce-ieee-acm-micro2023-paper-33 │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── install_deps.sh │ ├── install_deps_gem5.sh │ ├── install_deps_kernel.sh │ ├── plot.sh │ └── run.sh ├── reproduce-ieee-acm-micro2023-paper-38 │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── install_deps.bat │ ├── install_deps.sh │ ├── plot.bat │ ├── plot.sh │ ├── run.bat │ └── run.sh ├── reproduce-ieee-acm-micro2023-paper-5 │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── install_deps.bat │ ├── install_deps.sh │ ├── main.py │ ├── run.bat │ └── run.sh ├── reproduce-ieee-acm-micro2023-paper-8 │ ├── .gitignore │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── check.sh │ ├── customize.py │ ├── install_deps.bat │ ├── install_deps.sh │ ├── main.py │ ├── plot.bat │ ├── plot.sh │ ├── plot_pregenerated.sh │ ├── run.bat │ └── run.sh ├── reproduce-ieee-acm-micro2023-paper-85 │ ├── COPYRIGHT.md │ ├── Dockerfile │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── install_deps.sh │ ├── plot.sh │ └── run.sh ├── reproduce-ieee-acm-micro2023-paper-87 │ ├── COPYRIGHT.md │ ├── Preliminary_build_onikiri.sh │ ├── Preliminary_create_binary.sh │ ├── Preliminary_experiment.sh │ ├── Preliminary_experiment_setup.sh │ ├── Preliminary_plot.sh │ ├── README.md │ ├── _cm.yaml │ ├── build_compiler.sh │ ├── build_onikiri.sh │ ├── create_binary.sh │ ├── experiment.sh │ ├── experiment_setup.sh │ ├── install_deps.sh │ └── plot.sh ├── reproduce-ieee-acm-micro2023-paper-96 │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── install_deps.sh │ ├── main.py │ ├── plot.sh │ └── run.sh ├── reproduce-ipol-paper-2022-439 │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── requirements.txt │ ├── run.bat │ └── run.sh ├── reproduce-mlperf-inference-dummy │ └── README.md ├── reproduce-mlperf-octoml-tinyml-results │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── reproduce-mlperf-training-nvidia │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run-resnet.sh │ └── run.sh ├── run-all-mlperf-models │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run-bert-macos.sh │ ├── run-bert.sh │ ├── run-cpp-implementation.sh │ ├── run-mobilenet-models.sh │ ├── run-nvidia-4090.sh │ ├── run-nvidia-a100.sh │ ├── run-nvidia-t4.sh │ ├── run-pruned-bert.sh │ ├── run-reference-models.sh │ ├── run-resnet50-macos.sh │ ├── run-resnet50.sh │ ├── run-retinanet-sh │ └── template.sh ├── run-docker-container │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── run-mlperf-inference-app │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── faq │ │ ├── ctuning-cpp-tflite.md │ │ ├── deepsparse.md │ │ ├── intel.md │ │ ├── mlcommons-cpp.md │ │ ├── mlcommons-python.md │ │ ├── nvidia.md │ │ └── qualcomm.md │ ├── modular-cm-containers │ │ ├── README.md │ │ ├── _common.bat │ │ ├── _common.sh │ │ ├── build.bat │ │ ├── build.sh │ │ ├── mlperf-inference--ubuntu-cpu.Dockerfile │ │ ├── run.bat │ │ └── run.sh │ ├── run_mobilenet.py │ 
└── setup │ │ ├── b-deepsparse.md │ │ ├── i-intel.md │ │ ├── i-nvidia.md │ │ └── i-qualcomm.md ├── run-mlperf-inference-mobilenet-models │ ├── COPYRIGHT.md │ ├── README-about.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── run-mlperf-inference-submission-checker │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── code.py │ ├── customize.py │ ├── run.bat │ └── run.sh ├── run-mlperf-power-client │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── dummy.sh │ └── run.sh ├── run-mlperf-power-server │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── run-mlperf-training-submission-checker │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── run-python │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── run.bat │ └── run.sh ├── run-terraform │ ├── COPYRIGHT.md │ ├── README-about.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── aws │ │ ├── apply_credentials.sh │ │ ├── credentials.example │ │ └── main.tf │ ├── customize.py │ ├── gcp │ │ ├── apply_credentials.sh │ │ └── main.tf │ └── run.sh ├── run-vllm-server │ ├── COPYRIGHT.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── runtime-system-infos │ ├── COPYRIGHT.md │ ├── _cm.yaml │ └── customize.py ├── save-mlperf-inference-implementation-state │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── set-device-settings-qaic │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── set-echo-off-win │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── set-performance-mode │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run-ubuntu.sh │ ├── run.bat │ └── run.sh ├── set-sqlite-dir │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── code.py │ ├── customize.py │ ├── run.bat │ └── run.sh ├── set-user-limits │ ├── COPYRIGHT.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── set-venv │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── tar-my-folder │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ └── customize.py ├── test-cm-core │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ ├── run.sh │ └── src │ │ ├── script │ │ ├── check.py │ │ ├── process_dockerfile.py │ │ ├── process_readme.py │ │ ├── test_deps.py │ │ ├── test_docker.py │ │ ├── test_features.py │ │ └── test_install.py │ │ ├── test_cm.py │ │ ├── test_search_speed.py │ │ └── tutorials │ │ ├── test_tutorial_retinanet.py │ │ ├── test_tutorial_tvm.py │ │ ├── test_tutorial_tvm_pip_ge.py │ │ └── test_tutorial_tvm_pip_vm.py ├── test-cm-script-pipeline │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ ├── run.sh │ ├── run2.bat │ └── run2.sh ├── test-cm-scripts │ ├── COPYRIGHT.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── test-debug │ ├── .vscode │ │ └── launch.json │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── _demo.py │ ├── customize.py │ ├── python │ │ ├── .vscode │ │ │ └── launch.json │ │ └── main.py │ ├── run.bat │ └── run.sh ├── test-deps-conditions │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ └── _cm.yaml ├── test-deps-conditions2 │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ └── _cm.yaml ├── test-download-and-extract-artifacts │ ├── 
COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── test-dummy │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── test-mlperf-inference-retinanet │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ ├── run.bat │ └── run.sh ├── test-set-sys-user-cm │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ └── run.sh ├── truncate-mlperf-inference-accuracy-log │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── upgrade-python-pip │ ├── COPYRIGHT.md │ ├── README.md │ ├── _cm.yaml │ ├── run.bat │ └── run.sh └── wrapper-reproduce-octoml-tinyml-submission │ ├── COPYRIGHT.md │ ├── README-extra.md │ ├── README.md │ ├── _cm.yaml │ ├── customize.py │ └── run.sh ├── setup.py └── tests ├── script ├── check.py ├── process_dockerfile.py ├── process_readme.py ├── process_tests.py ├── test_deps.py ├── test_docker.py ├── test_features.py └── test_install.py ├── test_cm.py ├── test_search_speed.py └── tutorials ├── test_tutorial_retinanet.py ├── test_tutorial_tvm.py ├── test_tutorial_tvm_pip_ge.py └── test_tutorial_tvm_pip_vm.py /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | https://github.com/mlcommons/ck/blob/master/CONTRIBUTING.md 2 | -------------------------------------------------------------------------------- /LICENSE.third-party.md: -------------------------------------------------------------------------------- 1 | This CM repository may contain CM scripts with third-party files licensed under Apache2, BSD or MIT license. 2 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.md 2 | include VERSION 3 | include git_commit_hash.txt 4 | -------------------------------------------------------------------------------- /VERSION: -------------------------------------------------------------------------------- 1 | 0.6.6 2 | -------------------------------------------------------------------------------- /automation/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone and continue development as a community effort. 4 | -------------------------------------------------------------------------------- /automation/cache/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone and continue development as a community effort. 4 | -------------------------------------------------------------------------------- /automation/cfg/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone and continue development as a community effort. 
4 | -------------------------------------------------------------------------------- /automation/cfg/README-extra.md: -------------------------------------------------------------------------------- 1 | Examples: 2 | 3 | ```bash 4 | cm set cfg default 5 | cm set cfg default --key.script.silent 6 | cm set cfg default --key.script.silent- 7 | 8 | ``` 9 | -------------------------------------------------------------------------------- /automation/challenge/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone and continue development as a community effort. 4 | -------------------------------------------------------------------------------- /automation/challenge/_cm.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias": "challenge", 3 | "automation_alias": "automation", 4 | "automation_uid": "bbeb15d8f0a944a4", 5 | "tags": [ 6 | "automation" 7 | ], 8 | "uid": "3d84abd768f34e08" 9 | } 10 | -------------------------------------------------------------------------------- /automation/contributor/_cm.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias": "contributor", 3 | "automation_alias": "automation", 4 | "automation_uid": "bbeb15d8f0a944a4", 5 | "tags": [ 6 | "automation" 7 | ], 8 | "uid": "68eae17b590d4f8f" 9 | } 10 | -------------------------------------------------------------------------------- /automation/data/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone and continue development as a community effort. 4 | -------------------------------------------------------------------------------- /automation/data/_cm.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias": "data", 3 | "automation_alias": "automation", 4 | "automation_uid": "bbeb15d8f0a944a4", 5 | "tags": [ 6 | "automation" 7 | ], 8 | "uid": "84d8ef6914bf4d78" 9 | } 10 | -------------------------------------------------------------------------------- /automation/docker/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone and continue development as a community effort. 4 | -------------------------------------------------------------------------------- /automation/docs/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone and continue development as a community effort. 
4 | -------------------------------------------------------------------------------- /automation/docs/_cm.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias": "docs", 3 | "automation_alias": "automation", 4 | "automation_uid": "bbeb15d8f0a944a4", 5 | "tags": [ 6 | "automation" 7 | ], 8 | "uid": "9558c9e6ca124065" 9 | } 10 | -------------------------------------------------------------------------------- /automation/experiment/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone and continue development as a community effort. 4 | -------------------------------------------------------------------------------- /automation/experiment/tests/test2.bat: -------------------------------------------------------------------------------- 1 | cm run experiment --tags=test @test_input.yaml -- echo %VAR1% --batch_size={{VAR1}} {{VAR2}} {{VAR4{['xx','yy','zz']}}}-%%VAR3%% 2 | -------------------------------------------------------------------------------- /automation/experiment/tests/test2.sh: -------------------------------------------------------------------------------- 1 | cm run experiment --tags=test @test_input.yaml -- echo "\${VAR1} --batch_size={{VAR1}} {{VAR2}} {{VAR4{['xx','yy','zz']}}}-\${VAR3}" -------------------------------------------------------------------------------- /automation/experiment/tests/test3.bat: -------------------------------------------------------------------------------- 1 | cm run experiment --tags=test @test3_input.yaml -- cm run script "print hello-world native" --env.CM_ENV_TEST1={{VAR1}} --const.CM_ENV_TEST2={{VAR2}} 2 | -------------------------------------------------------------------------------- /automation/experiment/tests/test3.sh: -------------------------------------------------------------------------------- 1 | cm run experiment --tags=test @test3_input.yaml -- cm run script "print hello-world native" --env.CM_ENV_TEST1={{VAR1}} --const.CM_ENV_TEST2={{VAR2}} 2 | -------------------------------------------------------------------------------- /automation/experiment/tests/test3_input.yaml: -------------------------------------------------------------------------------- 1 | explore: 2 | VAR1: [1,2,3] 3 | VAR2: ["a","b"] 4 | CM_ENV_TEST3: "[2**i for i in range(0,6)]" 5 | -------------------------------------------------------------------------------- /automation/experiment/tests/test__json.bat: -------------------------------------------------------------------------------- 1 | cm run experiment --tags=test @test_input.json -- {{CD}}\test_run.bat 2 | -------------------------------------------------------------------------------- /automation/experiment/tests/test__json.sh: -------------------------------------------------------------------------------- 1 | cm run experiment --tags=test @test_input.json -- {{CD}}/test_run.sh 2 | -------------------------------------------------------------------------------- /automation/experiment/tests/test__yaml.bat: -------------------------------------------------------------------------------- 1 | cm run experiment --tags=test @test_input.yaml -- {{CD}}\test_run.bat 2 | -------------------------------------------------------------------------------- /automation/experiment/tests/test__yaml.sh: 
-------------------------------------------------------------------------------- 1 | cm run experiment --tags=test @test_input.yaml -- {{CD}}/test_run.sh 2 | -------------------------------------------------------------------------------- /automation/experiment/tests/test_input.yaml: -------------------------------------------------------------------------------- 1 | explore: 2 | VAR1: [1,2,3] 3 | VAR2: ["a","b"] 4 | VAR3: "[2**i for i in range(0,6)]" 5 | -------------------------------------------------------------------------------- /automation/experiment/tests/test_run.bat: -------------------------------------------------------------------------------- 1 | echo %VAR1% --batch_size=%VAR3% %VAR2% 2 | 3 | echo {"x":%VAR1%, "y":"%VAR2%", "z":%VAR3%} > cm-output.json 4 | -------------------------------------------------------------------------------- /automation/experiment/tests/test_run.sh: -------------------------------------------------------------------------------- 1 | echo $VAR1 --batch_size=$VAR3 $VAR2 2 | -------------------------------------------------------------------------------- /automation/project/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone and continue development as a community effort. 4 | -------------------------------------------------------------------------------- /automation/report/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone and continue development as a community effort. 4 | -------------------------------------------------------------------------------- /automation/report/_cm.json: -------------------------------------------------------------------------------- 1 | { 2 | "alias": "report", 3 | "automation_alias": "automation", 4 | "automation_uid": "bbeb15d8f0a944a4", 5 | "tags": [ 6 | "automation" 7 | ], 8 | "uid": "6462ecdba2054467" 9 | } 10 | -------------------------------------------------------------------------------- /automation/script/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone and continue development as a community effort. 4 | -------------------------------------------------------------------------------- /automation/script/assets/scripts-workflow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mlcommons/cm4mlops/e1d5c7cf04d189531dd6c22dd1176100c94608c1/automation/script/assets/scripts-workflow.png -------------------------------------------------------------------------------- /automation/script/docker_repro_example/ubuntu-23.04.Dockerfile.build.bat: -------------------------------------------------------------------------------- 1 | docker build -f "ubuntu-23.04.Dockerfile" -t "cknowledge/test-cm-script:ubuntu-23.04-cm-dev" . 
2 | -------------------------------------------------------------------------------- /automation/script/docker_repro_example/ubuntu-23.04.Dockerfile.build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | docker build -f "ubuntu-23.04.Dockerfile" -t "cknowledge/test-cm-script:ubuntu-23.04-cm-dev" . 4 | -------------------------------------------------------------------------------- /automation/script/docker_repro_example/ubuntu-23.04.Dockerfile.run.bat: -------------------------------------------------------------------------------- 1 | docker run -it --entrypoint "" cknowledge/test-cm-script:ubuntu-23.04-cm-dev bash 2 | -------------------------------------------------------------------------------- /automation/script/docker_repro_example/ubuntu-23.04.Dockerfile.run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | docker run -it --entrypoint "" cknowledge/test-cm-script:ubuntu-23.04-cm-dev bash 4 | -------------------------------------------------------------------------------- /automation/script/template-ae-python/README-extra.md: -------------------------------------------------------------------------------- 1 | # CM script to run and reproduce experiments 2 | 3 | -------------------------------------------------------------------------------- /automation/script/template-python/README-extra.md: -------------------------------------------------------------------------------- 1 | # CM script 2 | -------------------------------------------------------------------------------- /automation/script/template-python/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mlcommons/cm4mlops/e1d5c7cf04d189531dd6c22dd1176100c94608c1/automation/script/template-python/requirements.txt -------------------------------------------------------------------------------- /automation/script/template-pytorch/README-extra.md: -------------------------------------------------------------------------------- 1 | # CM script 2 | -------------------------------------------------------------------------------- /automation/script/template-pytorch/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mlcommons/cm4mlops/e1d5c7cf04d189531dd6c22dd1176100c94608c1/automation/script/template-pytorch/requirements.txt -------------------------------------------------------------------------------- /automation/script/template/README-extra.md: -------------------------------------------------------------------------------- 1 | # CM script 2 | -------------------------------------------------------------------------------- /automation/script/template/run.bat: -------------------------------------------------------------------------------- 1 | rem native script 2 | -------------------------------------------------------------------------------- /automation/utils/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone and continue development as a community effort. 
4 | -------------------------------------------------------------------------------- /cfg/benchmark-hardware-compute/amd-cpu-x64.json: -------------------------------------------------------------------------------- 1 | { 2 | "uid": "cdfd424c32734e38", 3 | "name": "AMD - x64", 4 | "tags": "cpu,x64,generic,amd", 5 | "mlperf_inference_device": "cpu" 6 | } 7 | -------------------------------------------------------------------------------- /cfg/benchmark-hardware-compute/amd-gpu.json: -------------------------------------------------------------------------------- 1 | { 2 | "uid": "d8f06040f7294319", 3 | "name": "AMD - GPU", 4 | "tags": "gpu,amd", 5 | "mlperf_inference_device": "rocm" 6 | } 7 | -------------------------------------------------------------------------------- /cfg/benchmark-hardware-compute/generic-cpu-arm64.json: -------------------------------------------------------------------------------- 1 | { 2 | "uid":"357a972e79614903", 3 | "name": "Arm - AArch64", 4 | "tags": "cpu,arm64,aarch64,generic", 5 | "mlperf_inference_device": "cpu" 6 | } 7 | -------------------------------------------------------------------------------- /cfg/benchmark-hardware-compute/google-tpu.json: -------------------------------------------------------------------------------- 1 | { 2 | "uid": "b3be7ac9ef954f5a", 3 | "name": "Google - TPU", 4 | "tags": "tpu,google", 5 | "mlperf_inference_device": "tpu" 6 | } 7 | -------------------------------------------------------------------------------- /cfg/benchmark-hardware-compute/habana-gaudi.json: -------------------------------------------------------------------------------- 1 | { 2 | "uid": "a42388a2a8cd412c", 3 | "name": "Intel/Habana - Gauidi 2", 4 | "tags": "gaudi,habana", 5 | "mlperf_inference_device": "gaudi" 6 | } 7 | -------------------------------------------------------------------------------- /cfg/benchmark-hardware-compute/intel-cpu-x64.json: -------------------------------------------------------------------------------- 1 | { 2 | "uid": "ee8c568e0ac44f2b", 3 | "name": "Intel - x64", 4 | "tags": "cpu,x64,generic,intel", 5 | "mlperf_inference_device": "cpu" 6 | } 7 | -------------------------------------------------------------------------------- /cfg/benchmark-hardware-compute/nvidia-gpu-jetson-orin.yaml: -------------------------------------------------------------------------------- 1 | uid: fe379ecd1e054a00 2 | 3 | tags: gpu,nvidia,jetson,orin 4 | 5 | name: "Nvidia - GPU - Jetson Orin" 6 | 7 | mlperf_inference_device: cuda 8 | -------------------------------------------------------------------------------- /cfg/benchmark-hardware-compute/nvidia-gpu.json: -------------------------------------------------------------------------------- 1 | { 2 | "uid": "fe379ecd1e054a00", 3 | "name": "Nvidia - GPU", 4 | "tags": "gpu,nvidia", 5 | "mlperf_inference_device": "cuda" 6 | } 7 | -------------------------------------------------------------------------------- /cfg/benchmark-hardware-compute/qualcomm-ai100.json: -------------------------------------------------------------------------------- 1 | { 2 | "uid": "d2ae645066664463", 3 | "name": "Qualcomm - AI 100", 4 | "tags": "accelerator,acc,qualcomm,ai,100,ai-100", 5 | "mlperf_inference_device": "qaic" 6 | } 7 | -------------------------------------------------------------------------------- /cfg/benchmark-hardware-compute/stm-32L4R5ZIT6U-NUCLEO-L4R5ZI.yaml: -------------------------------------------------------------------------------- 1 | uid: 2cd26d4f92ca4b85 2 | 3 | tags: 
stm,stm32,stm32l4r5zit6u,nucleo,l4r5zi 4 | 5 | name: "STM32L4R5ZIT6U - NUCLEO-L4R5ZI" 6 | -------------------------------------------------------------------------------- /cfg/benchmark-run-mlperf-inference-v3.1/run-7f094c244ebb4985.md: -------------------------------------------------------------------------------- 1 | TBD1 2 | -------------------------------------------------------------------------------- /cfg/benchmark-run-mlperf-inference-v3.1/run-df843c22cbf54aaf.md: -------------------------------------------------------------------------------- 1 | TBD2 2 | -------------------------------------------------------------------------------- /cfg/docker-basic-configurations/_cm.yaml: -------------------------------------------------------------------------------- 1 | alias: docker-basic-configurations 2 | uid: d2a0c5bb17664c93 3 | 4 | automation_alias: cfg 5 | automation_uid: 88dce9c160324c5d 6 | 7 | tags: 8 | - docker 9 | - basic 10 | - configurations 11 | -------------------------------------------------------------------------------- /cfg/docker-basic-configurations/basic-archlinux.yaml: -------------------------------------------------------------------------------- 1 | uid: 9960e9fb3cb24cb3 2 | 3 | name: "Basic ArchLinux" 4 | 5 | input: 6 | docker_base_image: 'archlinux' 7 | docker_os: arch 8 | docker_os_version: 'latest' 9 | -------------------------------------------------------------------------------- /cfg/docker-basic-configurations/basic-rhel-9.yaml: -------------------------------------------------------------------------------- 1 | uid: 27b4afcdd8e042e8 2 | 3 | name: "Basic RHEL 9" 4 | 5 | input: 6 | docker_base_image: 'registry.access.redhat.com/ubi9' 7 | docker_os: 'rhel' 8 | docker_os_version: '9' 9 | -------------------------------------------------------------------------------- /cfg/docker-basic-configurations/basic-ubuntu-20.04.yaml: -------------------------------------------------------------------------------- 1 | uid: 59311e6098c14b21 2 | 3 | name: "Basic Ubuntu 20.04" 4 | 5 | input: 6 | docker_base_image: 'ubuntu:20.04' 7 | docker_os: ubuntu 8 | docker_os_version: '20.04' 9 | -------------------------------------------------------------------------------- /cfg/docker-basic-configurations/basic-ubuntu-22.04.yaml: -------------------------------------------------------------------------------- 1 | uid: 614aa48d90724835 2 | 3 | name: "Basic Ubuntu 22.04" 4 | 5 | input: 6 | docker_base_image: 'ubuntu:22.04' 7 | docker_os: ubuntu 8 | docker_os_version: '22.04' 9 | -------------------------------------------------------------------------------- /cfg/docker-basic-configurations/basic-ubuntu-23.04.yaml: -------------------------------------------------------------------------------- 1 | uid: 276bd8ab39324f5f 2 | 3 | name: "Basic Ubuntu 23.04" 4 | 5 | input: 6 | docker_base_image: 'ubuntu:23.04' 7 | docker_os: ubuntu 8 | docker_os_version: '23.04' 9 | -------------------------------------------------------------------------------- /cfg/docker-basic-configurations/basic-ubuntu-24.04.yaml: -------------------------------------------------------------------------------- 1 | uid: 12e86eb386314866 2 | 3 | name: "Basic Ubuntu 24.04" 4 | 5 | input: 6 | docker_base_image: 'ubuntu:24.04' 7 | docker_os: ubuntu 8 | docker_os_version: '24.04' 9 | -------------------------------------------------------------------------------- /cmr.yaml: -------------------------------------------------------------------------------- 1 | alias: mlcommons@cm4mlops 2 | uid: 9e97bb72b0474657 3 | 4 | 
git: true 5 | 6 | version: 2.3.9 7 | 8 | deps: 9 | - alias: mlcommons@ck 10 | uid: a4705959af8e447a 11 | conflict: True 12 | 13 | -------------------------------------------------------------------------------- /debug.py: -------------------------------------------------------------------------------- 1 | # Developer: Grigori Fursin 2 | 3 | import cmind 4 | import sys 5 | 6 | print(sys.executable) 7 | 8 | r = cmind.access( 9 | 'run script "print hello-world python" --debug_uid=f52670e5f3f345a2') 10 | print(r) 11 | -------------------------------------------------------------------------------- /docs/img/pages (80).png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mlcommons/cm4mlops/e1d5c7cf04d189531dd6c22dd1176100c94608c1/docs/img/pages (80).png -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | mkdocs-material 2 | swagger-markdown 3 | mkdocs-macros-plugin 4 | ruamel.yaml 5 | slugify 6 | mkdocs-caseinsensitive-plugin 7 | -------------------------------------------------------------------------------- /docs/scripts/AI-ML-optimization/index.md: -------------------------------------------------------------------------------- 1 | * [calibrate-model-for.qaic](calibrate-model-for.qaic/index.md) 2 | * [compile-model-for.qaic](compile-model-for.qaic/index.md) 3 | * [prune-bert-models](prune-bert-models/index.md) 4 | -------------------------------------------------------------------------------- /docs/scripts/CM-Interface/index.md: -------------------------------------------------------------------------------- 1 | * [get-cache-dir](get-cache-dir/index.md) 2 | -------------------------------------------------------------------------------- /docs/scripts/CM-automation/index.md: -------------------------------------------------------------------------------- 1 | * [create-custom-cache-entry](create-custom-cache-entry/index.md) 2 | -------------------------------------------------------------------------------- /docs/scripts/CM-interface-prototyping/index.md: -------------------------------------------------------------------------------- 1 | * [test-debug](test-debug/index.md) 2 | * [test-mlperf-inference-retinanet](test-mlperf-inference-retinanet/index.md) 3 | -------------------------------------------------------------------------------- /docs/scripts/Collective-benchmarking/index.md: -------------------------------------------------------------------------------- 1 | * [launch-benchmark](launch-benchmark/index.md) 2 | -------------------------------------------------------------------------------- /docs/scripts/Dashboard-automation/index.md: -------------------------------------------------------------------------------- 1 | * [publish-results-to-dashboard](publish-results-to-dashboard/index.md) 2 | -------------------------------------------------------------------------------- /docs/scripts/GUI/index.md: -------------------------------------------------------------------------------- 1 | * [gui](gui/index.md) 2 | -------------------------------------------------------------------------------- /docs/scripts/Legacy-CK-support/index.md: -------------------------------------------------------------------------------- 1 | * [get-ck](get-ck/index.md) 2 | * [get-ck-repo-mlops](get-ck-repo-mlops/index.md) 3 | -------------------------------------------------------------------------------- 
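The `debug.py` shown earlier calls a CM script programmatically through the string form of `cmind.access`. Below is a minimal sketch of the same pattern with the conventional CM return-code check; it assumes only that the `cmind` package is installed and that this `mlcommons@cm4mlops` repository has been pulled, and it omits the `--debug_uid` flag used in `debug.py`:

```python
# Minimal sketch (assumptions: the cmind package is installed and this CM
# repository, mlcommons@cm4mlops, has been pulled). It mirrors debug.py,
# minus the --debug_uid flag, and checks the conventional CM result dict,
# which carries 'return' (0 on success) and 'error' on failure.
import sys

import cmind

r = cmind.access('run script "print hello-world python"')
if r['return'] > 0:
    # CM convention: a non-zero 'return' is accompanied by an 'error' message.
    print(f"CM error: {r.get('error', 'unknown error')}")
    sys.exit(r['return'])

print("Script finished successfully")
```

By CM convention every automation action returns a dictionary of this shape, so the same `return`/`error` check applies to the other scripts listed in this repository.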
/docs/scripts/Modular-MLPerf-training-benchmark-pipeline/index.md: -------------------------------------------------------------------------------- 1 | * [app-mlperf-training-nvidia](app-mlperf-training-nvidia/index.md) 2 | * [app-mlperf-training-reference](app-mlperf-training-reference/index.md) 3 | -------------------------------------------------------------------------------- /docs/scripts/Modular-application-pipeline/index.md: -------------------------------------------------------------------------------- 1 | * [app-image-corner-detection](app-image-corner-detection/index.md) 2 | -------------------------------------------------------------------------------- /docs/scripts/Platform-information/index.md: -------------------------------------------------------------------------------- 1 | * [detect-cpu](detect-cpu/index.md) 2 | * [detect-os](detect-os/index.md) 3 | -------------------------------------------------------------------------------- /docs/scripts/Remote-automation/index.md: -------------------------------------------------------------------------------- 1 | * [remote-run-commands](remote-run-commands/index.md) 2 | -------------------------------------------------------------------------------- /git_commit_hash.txt: -------------------------------------------------------------------------------- 1 | cfd9892cc83e4bd869789ded9291fbcc2d058045 2 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=60", "wheel", "cmind"] 3 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | cmind>=2.0.1 2 | pyyaml 3 | requests 4 | setuptools 5 | giturlparse 6 | tabulate 7 | -------------------------------------------------------------------------------- /script/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/activate-python-venv/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/activate-python-venv/run.bat: -------------------------------------------------------------------------------- 1 | echo. 2 | echo call "%CM_VIRTUAL_ENV_SCRIPTS_PATH%\activate.bat && cmd" 3 | echo. 4 | echo Enter exit to exit virtual env. 5 | echo. 6 | 7 | call %CM_VIRTUAL_ENV_SCRIPTS_PATH%\activate.bat && cmd 8 | -------------------------------------------------------------------------------- /script/add-custom-nvidia-system/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/add-custom-nvidia-system/README-extra.md: -------------------------------------------------------------------------------- 1 | # About 2 | This CM script detects the system details using Nvidia script 3 | -------------------------------------------------------------------------------- /script/add-custom-nvidia-system/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | CUR=$PWD 3 | cd ${CM_MLPERF_INFERENCE_NVIDIA_CODE_PATH} 4 | ${CM_PYTHON_BIN_WITH_PATH} scripts/custom_systems/add_custom_system.py 5 | test $? -eq 0 || exit $? 6 | -------------------------------------------------------------------------------- /script/app-image-classification-onnx-py/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/app-image-classification-onnx-py/img/computer_mouse.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mlcommons/cm4mlops/e1d5c7cf04d189531dd6c22dd1176100c94608c1/script/app-image-classification-onnx-py/img/computer_mouse.jpg -------------------------------------------------------------------------------- /script/app-image-classification-onnx-py/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mlcommons/cm4mlops/e1d5c7cf04d189531dd6c22dd1176100c94608c1/script/app-image-classification-onnx-py/requirements.txt -------------------------------------------------------------------------------- /script/app-image-classification-tf-onnx-cpp/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/app-image-classification-tf-onnx-cpp/README-extra.md: -------------------------------------------------------------------------------- 1 | # Image Classification App in C++ for ResNet50 model 2 | 3 | * In development stage, not complete 4 | -------------------------------------------------------------------------------- /script/app-image-classification-torch-py/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/app-image-classification-torch-py/img/computer_mouse.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mlcommons/cm4mlops/e1d5c7cf04d189531dd6c22dd1176100c94608c1/script/app-image-classification-torch-py/img/computer_mouse.jpg -------------------------------------------------------------------------------- /script/app-image-classification-torch-py/requirements.txt: -------------------------------------------------------------------------------- 1 | Pillow 2 | requests 3 | numpy 4 | 5 | -------------------------------------------------------------------------------- /script/app-image-classification-tvm-onnx-py/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/app-image-classification-tvm-onnx-py/img/computer_mouse.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mlcommons/cm4mlops/e1d5c7cf04d189531dd6c22dd1176100c94608c1/script/app-image-classification-tvm-onnx-py/img/computer_mouse.jpg -------------------------------------------------------------------------------- /script/app-image-classification-tvm-onnx-py/requirements.txt: -------------------------------------------------------------------------------- 1 | matplotlib 2 | opencv-python 3 | scipy 4 | onnx 5 | decorator 6 | attrs 7 | psutil 8 | -------------------------------------------------------------------------------- /script/app-image-corner-detection/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/app-image-corner-detection/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | CUR=${CM_TMP_CURRENT_SCRIPT_PATH:-$PWD} 4 | mkdir -p $CUR"/output" 5 | 6 | test $? -eq 0 || exit 1 7 | -------------------------------------------------------------------------------- /script/app-loadgen-generic-python/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/app-loadgen-generic-python/run.bat: -------------------------------------------------------------------------------- 1 | rem native script 2 | 3 | %CM_PYTHON_BIN_WITH_PATH% %CM_TMP_CURRENT_SCRIPT_PATH%\src\main.py %CM_RUN_OPTS% 4 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 5 | -------------------------------------------------------------------------------- /script/app-loadgen-generic-python/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ${CM_PYTHON_BIN_WITH_PATH} ${CM_TMP_CURRENT_SCRIPT_PATH}/src/main.py ${CM_RUN_OPTS} 4 | test $? 
-eq 0 || exit 1 5 | -------------------------------------------------------------------------------- /script/app-loadgen-generic-python/tests/modular-cm-containers/run.bat: -------------------------------------------------------------------------------- 1 | call _common.bat 2 | 3 | docker run -it %CM_DOCKER_ORG%/%CM_DOCKER_NAME%-%CM_HW_TARGET%:%CM_OS_NAME%-%CM_OS_VERSION% 4 | -------------------------------------------------------------------------------- /script/app-loadgen-generic-python/tests/modular-cm-containers/run.sh: -------------------------------------------------------------------------------- 1 | . ./_common.sh 2 | 3 | docker run -it ${CM_DOCKER_ORG}/${CM_DOCKER_NAME}-%CM_HW_TARGET%:${CM_OS_NAME}-${CM_OS_VERSION} 4 | -------------------------------------------------------------------------------- /script/app-mlperf-inference-amd/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/app-mlperf-inference-amd/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | if [[ ${CM_CALL_MLPERF_RUNNER} == "no" ]]; then 3 | cd ${CM_RUN_DIR} 4 | cmd=${CM_RUN_CMD} 5 | echo "${cmd}" 6 | eval "${cmd}" 7 | test $? -eq 0 || exit $? 8 | fi 9 | -------------------------------------------------------------------------------- /script/app-mlperf-inference-ctuning-cpp-tflite/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/app-mlperf-inference-dummy/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/app-mlperf-inference-dummy/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | if [[ ${CM_CALL_MLPERF_RUNNER} == "no" ]]; then 3 | cd ${CM_RUN_DIR} 4 | cmd=${CM_RUN_CMD} 5 | echo "${cmd}" 6 | eval "${cmd}" 7 | test $? -eq 0 || exit $? 8 | fi 9 | -------------------------------------------------------------------------------- /script/app-mlperf-inference-intel/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/app-mlperf-inference-mlcommons-cpp/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/app-mlperf-inference-mlcommons-python/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/app-mlperf-inference-nvidia/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/app-mlperf-inference-nvidia/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | if [[ ${CM_CALL_MLPERF_RUNNER} == "no" ]]; then 3 | cd ${CM_RUN_DIR} 4 | cmd=${CM_RUN_CMD} 5 | echo "${cmd}" 6 | eval "${cmd}" 7 | test $? -eq 0 || exit $? 8 | fi 9 | -------------------------------------------------------------------------------- /script/app-mlperf-inference-qualcomm/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/app-mlperf-inference-qualcomm/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | if [[ ${CM_CALL_MLPERF_RUNNER} == "no" ]]; then 3 | cd ${CM_RUN_DIR} 4 | cmd=${CM_RUN_CMD} 5 | echo "${cmd}" 6 | eval "${cmd}" 7 | test $? -eq 0 || exit $? 8 | fi 9 | -------------------------------------------------------------------------------- /script/app-mlperf-inference-redhat/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/app-mlperf-inference-redhat/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | if [[ ${CM_CALL_MLPERF_RUNNER} == "no" ]]; then 3 | cd ${CM_RUN_DIR} 4 | cmd=${CM_RUN_CMD} 5 | echo "${cmd}" 6 | eval "${cmd}" 7 | test $? -eq 0 || exit $? 8 | fi 9 | -------------------------------------------------------------------------------- /script/app-mlperf-inference/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/app-mlperf-inference/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | cmd="${CMD}" 4 | if [[ -n ${cmd} ]]; then 5 | echo "$cmd" 6 | eval "$cmd" 7 | test $? -eq 0 || exit $? 
8 | fi 9 | -------------------------------------------------------------------------------- /script/app-mlperf-inference/verify_accuracy.sh: -------------------------------------------------------------------------------- 1 | #/bin/bash 2 | echo "Running: $CMD" 3 | eval $CMD 4 | test $? -eq 0 || exit $? 5 | -------------------------------------------------------------------------------- /script/app-mlperf-training-nvidia/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/app-mlperf-training-reference/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/app-stable-diffusion-onnx-py/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/app-stable-diffusion-onnx-py/run.bat: -------------------------------------------------------------------------------- 1 | %CM_PYTHON_BIN_WITH_PATH% %CM_TMP_CURRENT_SCRIPT_PATH%\process.py 2 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 3 | -------------------------------------------------------------------------------- /script/app-stable-diffusion-onnx-py/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ${CM_PYTHON_BIN} ${CM_TMP_CURRENT_SCRIPT_PATH}/process.py 4 | test $? -eq 0 || exit 1 5 | -------------------------------------------------------------------------------- /script/authenticate-github-cli/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/authenticate-github-cli/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts//authenticate-github-cli](https://docs.mlcommons.org/cm4mlops/scripts//authenticate-github-cli) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/benchmark-any-mlperf-inference-implementation/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/benchmark-program-mlperf/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. 
All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/benchmark-program/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/benchmark-program/README-extra.md: -------------------------------------------------------------------------------- 1 | This is a universal script to run and profile programs. 2 | 3 | It is a part of our universal benchmarking and optimization roadmap: https://github.com/mlcommons/cm4mlops/issues/23 4 | -------------------------------------------------------------------------------- /script/build-docker-image/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/build-docker-image/examples/2-run-cm-command1.bat: -------------------------------------------------------------------------------- 1 | call 0-common.bat 2 | 3 | docker run -it %DOCKER_IMAGE_REPO%/%DOCKER_IMAGE_NAME%:%DOCKER_IMAGE_TAG% "cmr 'detect os' -j" 4 | -------------------------------------------------------------------------------- /script/build-docker-image/examples/2-run-cm-command2.bat: -------------------------------------------------------------------------------- 1 | call 0-common.bat 2 | 3 | cmr "run docker container" --image_repo=%DOCKER_IMAGE_REPO% --image_name=%DOCKER_IMAGE_NAME% --image_tag=%DOCKER_IMAGE_TAG% --run_cmd="cmr 'detect os' -j" 4 | -------------------------------------------------------------------------------- /script/build-docker-image/examples/2-run-cm-command3.bat: -------------------------------------------------------------------------------- 1 | call 0-common.bat 2 | 3 | cmr "run docker container" --image_repo=%DOCKER_IMAGE_REPO% --image_tag=%DOCKER_IMAGE_TAG% --script_tags=detect,os 4 | -------------------------------------------------------------------------------- /script/build-docker-image/examples/2-run-cm-command4.bat: -------------------------------------------------------------------------------- 1 | call 0-common.bat 2 | 3 | cmr "run docker container" --image_repo=%DOCKER_IMAGE_REPO% --image_tag=%DOCKER_IMAGE_TAG% --script_tags=detect,os --it 4 | -------------------------------------------------------------------------------- /script/build-docker-image/examples/2-run-cm-command5.bat: -------------------------------------------------------------------------------- 1 | call 0-common.bat 2 | 3 | cm docker script --tags=detect,os -j 4 | -------------------------------------------------------------------------------- /script/build-docker-image/examples/2-run-interactive1.bat: -------------------------------------------------------------------------------- 1 | call 0-common.bat 2 | 3 | docker run -it %DOCKER_IMAGE_REPO%/%DOCKER_IMAGE_NAME%:%DOCKER_IMAGE_TAG% -c bash 4 | -------------------------------------------------------------------------------- 
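The `build-docker-image/examples` batch files above exercise the same CM entry points from the Windows shell (`cmr 'detect os' -j`, `cm docker script --tags=detect,os -j`). Below is a minimal Python sketch of the equivalent calls through `cmind.access`, assuming its string form parses arguments the same way as the `cm` CLI (as `debug.py` earlier suggests):

```python
# Minimal sketch (assumptions: the cmind package is installed and its string
# form parses commands the same way as the cm CLI, as debug.py suggests).
# CLI counterparts from the batch examples above:
#   cmr "detect os" -j
#   cm docker script --tags=detect,os -j
import cmind

# Run the detect-os CM script natively on the host.
r = cmind.access('run script "detect os"')
assert r['return'] == 0, r.get('error')

# Re-run the same script inside a CM-built Docker container.
r = cmind.access('docker script --tags=detect,os')
assert r['return'] == 0, r.get('error')
```

Both forms resolve to the same `script` automation; the `docker` action simply wraps the run in a container, much like the `cmr "run docker container"` examples above.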
/script/build-docker-image/examples/2-run-interactive2.bat: -------------------------------------------------------------------------------- 1 | call 0-common.bat 2 | 3 | cmr "run docker container" --image_repo=%DOCKER_IMAGE_REPO% --image_name=%DOCKER_IMAGE_NAME% --image_tag=%DOCKER_IMAGE_TAG% --it 4 | -------------------------------------------------------------------------------- /script/build-docker-image/examples/3-push-to-docker-hub.bat: -------------------------------------------------------------------------------- 1 | call 0-common.bat 2 | 3 | docker push %DOCKER_IMAGE_REPO%/%DOCKER_IMAGE_NAME%:%DOCKER_IMAGE_TAG% 4 | -------------------------------------------------------------------------------- /script/build-docker-image/examples/README.md: -------------------------------------------------------------------------------- 1 | https://hub.docker.com/r/cknowledge/cm-base/tags 2 | -------------------------------------------------------------------------------- /script/build-docker-image/examples/computer_mouse.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mlcommons/cm4mlops/e1d5c7cf04d189531dd6c22dd1176100c94608c1/script/build-docker-image/examples/computer_mouse.jpg -------------------------------------------------------------------------------- /script/build-dockerfile/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/build-mlperf-inference-server-nvidia/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/build-mlperf-inference-server-nvidia/README-extra.md: -------------------------------------------------------------------------------- 1 | # About 2 | This CM script builds the Nvidia C++ implementation of MLPerf Inference 3 | -------------------------------------------------------------------------------- /script/calibrate-model-for.qaic/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/clean-nvidia-mlperf-inference-scratch-space/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/clean-nvidia-mlperf-inference-scratch-space/run.bat: -------------------------------------------------------------------------------- 1 | rem native script 2 | -------------------------------------------------------------------------------- /script/compile-model-for.qaic/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/compile-program/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/compile-program/README-extra.md: -------------------------------------------------------------------------------- 1 | This script compiles C and C++ programs. 2 | 3 | It is a part of our universal benchmarking and optimization roadmap: https://github.com/mlcommons/cm4mlops/issues/23 4 | -------------------------------------------------------------------------------- /script/compile-program/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/DevOps-automation/compile-program](https://docs.mlcommons.org/cm4mlops/scripts/DevOps-automation/compile-program) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/convert-csv-to-md/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/convert-csv-to-md/run.bat: -------------------------------------------------------------------------------- 1 | rem native script 2 | -------------------------------------------------------------------------------- /script/convert-ml-model-huggingface-to-onnx/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/convert-ml-model-huggingface-to-onnx/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | python -m transformers.onnx --model=${CM_MODEL_HUGG_PATH} ${PWD} -------------------------------------------------------------------------------- /script/copy-to-clipboard/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/copy-to-clipboard/run.bat: -------------------------------------------------------------------------------- 1 | rem native script 2 | 3 | %CM_PYTHON_BIN_WITH_PATH% %CM_TMP_CURRENT_SCRIPT_PATH%\code.py 4 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 5 | -------------------------------------------------------------------------------- /script/copy-to-clipboard/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ${CM_PYTHON_BIN_WITH_PATH} ${CM_TMP_CURRENT_SCRIPT_PATH}/code.py 4 | test $? -eq 0 || exit 1 5 | -------------------------------------------------------------------------------- /script/create-conda-env/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/create-conda-env/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | cmd="${CM_CONDA_BIN_WITH_PATH} create -p ${PWD}" 4 | echo "$cmd" 5 | eval "$cmd" 6 | test $? -eq 0 || exit $? 7 | 8 | -------------------------------------------------------------------------------- /script/create-custom-cache-entry/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/create-fpgaconvnet-app-tinyml/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/create-fpgaconvnet-config-tinyml/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/create-patch/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/create-patch/README-extra.md: -------------------------------------------------------------------------------- 1 | # Examples 2 | 3 | ``` 4 | cmr "create patch" --new=new --old=old --exclude=.git,__pycache__ 5 | ``` 6 | -------------------------------------------------------------------------------- /script/create-patch/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/DevOps-automation/create-patch](https://docs.mlcommons.org/cm4mlops/scripts/DevOps-automation/create-patch) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/destroy-terraform/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/destroy-terraform/README-extra.md: -------------------------------------------------------------------------------- 1 | This CM script is automatically called from the run-terraform script when the `--destroy` option is given. 2 | -------------------------------------------------------------------------------- /script/destroy-terraform/run.bat: -------------------------------------------------------------------------------- 1 | rem native script 2 | -------------------------------------------------------------------------------- /script/detect-cpu/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/detect-cpu/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Platform-information/detect-cpu](https://docs.mlcommons.org/cm4mlops/scripts/Platform-information/detect-cpu) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/detect-cpu/run.bat: -------------------------------------------------------------------------------- 1 | rem systeminfo /fo csv > tmp-systeminfo.csv 2 | wmic cpu get /FORMAT:csv > tmp-wmic-cpu.csv 3 | -------------------------------------------------------------------------------- /script/detect-os/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/detect-os/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Platform-information/detect-os](https://docs.mlcommons.org/cm4mlops/scripts/Platform-information/detect-os) for the documentation of this CM script.
2 | -------------------------------------------------------------------------------- /script/detect-os/run.bat: -------------------------------------------------------------------------------- 1 | echo {"detect-os-test":"win"} > tmp-run-state.json 2 | -------------------------------------------------------------------------------- /script/detect-os/run_config.yml: -------------------------------------------------------------------------------- 1 | docker: 2 | build: true 3 | docker_os: ubuntu 4 | docker_os_version: "22.04" 5 | 6 | run_with_default_inputs: true #if false the script won't run automatic tests 7 | -------------------------------------------------------------------------------- /script/detect-sudo/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/detect-sudo/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/DevOps-automation/detect-sudo](https://docs.mlcommons.org/cm4mlops/scripts/DevOps-automation/detect-sudo) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/download-and-extract/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/download-file/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/download-file/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/DevOps-automation/download-file](https://docs.mlcommons.org/cm4mlops/scripts/DevOps-automation/download-file) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/download-torrent/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/draw-graph-from-json-data/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/draw-graph-from-json-data/run.bat: -------------------------------------------------------------------------------- 1 | rem native script 2 | -------------------------------------------------------------------------------- /script/dump-pip-freeze/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/dump-pip-freeze/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts//dump-pip-freeze](https://docs.mlcommons.org/cm4mlops/scripts//dump-pip-freeze) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/dump-pip-freeze/run.bat: -------------------------------------------------------------------------------- 1 | if not "%CM_FAKE_RUN%" == "yes" ( 2 | %CM_PYTHON_BIN_WITH_PATH% %CM_TMP_CURRENT_SCRIPT_PATH%\dump.py 3 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 4 | ) 5 | -------------------------------------------------------------------------------- /script/extract-file/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/extract-file/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/DevOps-automation/extract-file](https://docs.mlcommons.org/cm4mlops/scripts/DevOps-automation/extract-file) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/fail/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/fail/README-extra.md: -------------------------------------------------------------------------------- 1 | # CM script 2 | -------------------------------------------------------------------------------- /script/fail/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/DevOps-automation/fail](https://docs.mlcommons.org/cm4mlops/scripts/DevOps-automation/fail) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/flash-tinyml-binary/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/generate-docs-for-all-scripts.cmd: -------------------------------------------------------------------------------- 1 | cm doc script --repos=mlcommons@cm4mlops --output_dir=.. -------------------------------------------------------------------------------- /script/generate-mlperf-inference-submission/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/generate-mlperf-inference-submission/default_files/power_settings.md: -------------------------------------------------------------------------------- 1 | No special power management settings are applied; out-of-the-box OS settings are used. 2 | -------------------------------------------------------------------------------- /script/generate-mlperf-inference-user-conf/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/generate-mlperf-tiny-report/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/generate-mlperf-tiny-submission/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/generate-mlperf-tiny-submission/README-extra.md: -------------------------------------------------------------------------------- 1 | # Generate MLPerf Tiny Submission Folder from a Results Directory 2 | 3 | This is a work-in-progress script. 4 | -------------------------------------------------------------------------------- /script/generate-nvidia-engine/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/generate-nvidia-engine/README-about.md: -------------------------------------------------------------------------------- 1 | This CM script is in the draft stage. 2 | -------------------------------------------------------------------------------- /script/get-android-sdk/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone.
4 | -------------------------------------------------------------------------------- /script/get-android-sdk/README-extra.md: -------------------------------------------------------------------------------- 1 | # About 2 | 3 | https://developer.android.com/studio#command-line-tools-only 4 | -------------------------------------------------------------------------------- /script/get-aocl/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-aocl/README-extra.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mlcommons/cm4mlops/e1d5c7cf04d189531dd6c22dd1176100c94608c1/script/get-aocl/README-extra.md -------------------------------------------------------------------------------- /script/get-aocl/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Compiler-automation/get-aocl](https://docs.mlcommons.org/cm4mlops/scripts/Compiler-automation/get-aocl) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-aocl/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | if [[ -z ${CM_GIT_REPO_CHECKOUT_PATH} ]]; then 3 | echo "Git repository not found!" 4 | exit 1 5 | fi 6 | cd ${CM_GIT_REPO_CHECKOUT_PATH} 7 | scons 8 | test $? -eq 0 || exit $? 9 | 10 | -------------------------------------------------------------------------------- /script/get-aria2/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-aria2/run.bat: -------------------------------------------------------------------------------- 1 | rem Detect version 2 | 3 | %CM_ARIA2_BIN_WITH_PATH% --version > tmp-ver.out 4 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 5 | -------------------------------------------------------------------------------- /script/get-aria2/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Detect version 4 | 5 | ${CM_ARIA2_BIN_WITH_PATH} --version > tmp-ver.out 6 | test $? -eq 0 || exit 1 7 | -------------------------------------------------------------------------------- /script/get-aws-cli/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-aws-cli/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Cloud-automation/get-aws-cli](https://docs.mlcommons.org/cm4mlops/scripts/Cloud-automation/get-aws-cli) for the documentation of this CM script. 
2 | -------------------------------------------------------------------------------- /script/get-aws-cli/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | aws --version > tmp-ver.out 3 | test $? -eq 0 || exit 1 4 | -------------------------------------------------------------------------------- /script/get-bazel/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-bazel/run.bat: -------------------------------------------------------------------------------- 1 | %CM_BAZEL_BIN_WITH_PATH% --version > tmp-ver.out 2 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 3 | -------------------------------------------------------------------------------- /script/get-blis/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-blis/README-extra.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mlcommons/cm4mlops/e1d5c7cf04d189531dd6c22dd1176100c94608c1/script/get-blis/README-extra.md -------------------------------------------------------------------------------- /script/get-blis/run.bat: -------------------------------------------------------------------------------- 1 | rem native script 2 | -------------------------------------------------------------------------------- /script/get-brew/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-brew/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" 3 | test $? -eq 0 || exit $? 4 | -------------------------------------------------------------------------------- /script/get-cache-dir/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-cache-dir/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/CM-Interface/get-cache-dir](https://docs.mlcommons.org/cm4mlops/scripts/CM-Interface/get-cache-dir) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-ck-repo-mlops/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. 
All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-ck-repo-mlops/run.bat: -------------------------------------------------------------------------------- 1 | ck pull repo:mlcommons@ck-mlops 2 | -------------------------------------------------------------------------------- /script/get-ck-repo-mlops/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ck pull repo:mlcommons@ck-mlops 4 | 5 | -------------------------------------------------------------------------------- /script/get-ck/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-ck/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Legacy-CK-support/get-ck](https://docs.mlcommons.org/cm4mlops/scripts/Legacy-CK-support/get-ck) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-ck/_cm.yaml: -------------------------------------------------------------------------------- 1 | alias: get-ck 2 | automation_alias: script 3 | automation_uid: 5b4e0237da074764 4 | cache: true 5 | category: Legacy CK support 6 | tags: 7 | - get 8 | - ck 9 | - ck-framework 10 | uid: 5575126797174cac 11 | -------------------------------------------------------------------------------- /script/get-ck/run.bat: -------------------------------------------------------------------------------- 1 | pip install ck 2 | -------------------------------------------------------------------------------- /script/get-ck/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | python3 -m pip install ck ${CM_CK_FRAMEWORK_INSTALL_CLI} 4 | -------------------------------------------------------------------------------- /script/get-cl/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-cl/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Compiler-automation/get-cl](https://docs.mlcommons.org/cm4mlops/scripts/Compiler-automation/get-cl) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-cl/run.bat: -------------------------------------------------------------------------------- 1 | "%CM_CL_BIN_WITH_PATH%" > tmp-ver.out 2>&1 2 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 3 | 4 | -------------------------------------------------------------------------------- /script/get-cmake/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. 
All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-cmake/run.bat: -------------------------------------------------------------------------------- 1 | %CM_CMAKE_BIN_WITH_PATH% --version > tmp-ver.out 2 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 3 | -------------------------------------------------------------------------------- /script/get-cmake/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | cmake_bin=${CM_CMAKE_BIN_WITH_PATH} 3 | 4 | ${cmake_bin} --version > tmp-ver.out 5 | test $? -eq 0 || exit 1 6 | -------------------------------------------------------------------------------- /script/get-cmsis_5/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-compiler-flags/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-compiler-rust/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-conda/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-conda/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/DevOps-automation/get-conda](https://docs.mlcommons.org/cm4mlops/scripts/DevOps-automation/get-conda) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-conda/run.bat: -------------------------------------------------------------------------------- 1 | %CM_CONDA_BIN_WITH_PATH% --version > tmp-ver.out 2 | -------------------------------------------------------------------------------- /script/get-conda/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ${CM_CONDA_BIN_WITH_PATH} --version > tmp-ver.out 4 | -------------------------------------------------------------------------------- /script/get-croissant/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/get-croissant/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/AI-ML-datasets/get-croissant](https://docs.mlcommons.org/cm4mlops/scripts/AI-ML-datasets/get-croissant) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-cuda-devices/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-cuda-devices/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/CUDA-automation/get-cuda-devices](https://docs.mlcommons.org/cm4mlops/scripts/CUDA-automation/get-cuda-devices) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-cuda-devices/detect.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ${CM_PYTHON_BIN_WITH_PATH} ${CM_TMP_CURRENT_SCRIPT_PATH}/detect.py 4 | test $? -eq 0 || exit $? 5 | -------------------------------------------------------------------------------- /script/get-cuda/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-cuda/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/CUDA-automation/get-cuda](https://docs.mlcommons.org/cm4mlops/scripts/CUDA-automation/get-cuda) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-cuda/run.bat: -------------------------------------------------------------------------------- 1 | "%CM_NVCC_BIN_WITH_PATH%" -V > tmp-ver.out 2 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 3 | 4 | -------------------------------------------------------------------------------- /script/get-cudnn/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/get-cudnn/README-extra.md: -------------------------------------------------------------------------------- 1 | # TBD 2 | 3 | We need to add detection of cuDNN version on Windows, Linux and MacOS 4 | -------------------------------------------------------------------------------- /script/get-cudnn/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/CUDA-automation/get-cudnn](https://docs.mlcommons.org/cm4mlops/scripts/CUDA-automation/get-cudnn) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-dataset-cifar10/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-dataset-cnndm/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-dataset-cnndm/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/AI-ML-datasets/get-dataset-cnndm](https://docs.mlcommons.org/cm4mlops/scripts/AI-ML-datasets/get-dataset-cnndm) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-dataset-coco/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-dataset-coco/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/AI-ML-datasets/get-dataset-coco](https://docs.mlcommons.org/cm4mlops/scripts/AI-ML-datasets/get-dataset-coco) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-dataset-coco2014/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-dataset-criteo/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/get-dataset-imagenet-aux/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-dataset-imagenet-calibration/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-dataset-imagenet-helper/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-dataset-imagenet-train/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-dataset-imagenet-train/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | test $? -eq 0 || exit $? 4 | -------------------------------------------------------------------------------- /script/get-dataset-imagenet-val/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-dataset-kits19/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-dataset-librispeech/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-dataset-librispeech/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | wget -nc ${CM_WGET_URL} --no-check-certificate 4 | test $? -eq 0 || exit 1 5 | 6 | tar -x --skip-old-files -vf ${CM_DATASET_ARCHIVE} 7 | test $? 
-eq 0 || exit 1 8 | 9 | -------------------------------------------------------------------------------- /script/get-dataset-mlperf-inference-igbh/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-dataset-mlperf-inference-mixtral/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-dataset-openimages-annotations/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-dataset-openimages-calibration/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-dataset-openimages/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-dataset-openimages/README-extra.md: -------------------------------------------------------------------------------- 1 | # Ubuntu 22.04 2 | `sudo apt-get install -y libgl1-mesa-dev` 3 | -------------------------------------------------------------------------------- /script/get-dataset-openorca/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-dataset-squad-vocab/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-dataset-squad/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/get-dataset-squad/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/AI-ML-datasets/get-dataset-squad](https://docs.mlcommons.org/cm4mlops/scripts/AI-ML-datasets/get-dataset-squad) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-dlrm-data-mlperf-inference/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-dlrm/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-dlrm/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/AI-ML-models/get-dlrm](https://docs.mlcommons.org/cm4mlops/scripts/AI-ML-models/get-dlrm) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-docker/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-docker/install.bat: -------------------------------------------------------------------------------- 1 | echo "Please install docker to continue" 2 | exit 1 3 | -------------------------------------------------------------------------------- /script/get-docker/install.sh: -------------------------------------------------------------------------------- 1 | echo "Please install docker to continue" 2 | exit 1 3 | -------------------------------------------------------------------------------- /script/get-docker/run.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | docker --version > tmp-ver.out 3 | if %errorlevel% neq 0 exit /b 1 4 | -------------------------------------------------------------------------------- /script/get-docker/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | docker --version > tmp-ver.out 3 | test $? -eq 0 || exit 1 4 | -------------------------------------------------------------------------------- /script/get-gcc/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/get-gcc/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Compiler-automation/get-gcc](https://docs.mlcommons.org/cm4mlops/scripts/Compiler-automation/get-gcc) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-gcc/run.bat: -------------------------------------------------------------------------------- 1 | %CM_GCC_BIN_WITH_PATH% --version > tmp-ver.out 2 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 3 | 4 | -------------------------------------------------------------------------------- /script/get-gcc/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | gcc_bin=${CM_GCC_BIN_WITH_PATH} 3 | echo "${gcc_bin} --version" 4 | 5 | ${gcc_bin} --version > tmp-ver.out 6 | test $? -eq 0 || exit 1 7 | 8 | cat tmp-ver.out 9 | -------------------------------------------------------------------------------- /script/get-generic-python-lib/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-generic-python-lib/run.bat: -------------------------------------------------------------------------------- 1 | IF NOT DEFINED CM_TMP_CURRENT_SCRIPT_PATH SET CM_TMP_CURRENT_SCRIPT_PATH=%CD% 2 | 3 | %CM_PYTHON_BIN_WITH_PATH% %CM_TMP_CURRENT_SCRIPT_PATH%\detect-version.py 4 | IF %ERRORLEVEL% NEQ 0 EXIT 1 5 | -------------------------------------------------------------------------------- /script/get-generic-python-lib/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | CM_TMP_CURRENT_SCRIPT_PATH=${CM_TMP_CURRENT_SCRIPT_PATH:-$PWD} 4 | 5 | ${CM_PYTHON_BIN_WITH_PATH} ${CM_TMP_CURRENT_SCRIPT_PATH}/detect-version.py 6 | test $? -eq 0 || exit $? 7 | exit 0 8 | -------------------------------------------------------------------------------- /script/get-generic-python-lib/validate_cache.bat: -------------------------------------------------------------------------------- 1 | IF NOT DEFINED CM_TMP_CURRENT_SCRIPT_PATH SET CM_TMP_CURRENT_SCRIPT_PATH=%CD% 2 | 3 | %CM_PYTHON_BIN_WITH_PATH% %CM_TMP_CURRENT_SCRIPT_PATH%\detect-version.py 4 | IF %ERRORLEVEL% NEQ 0 EXIT 1 5 | -------------------------------------------------------------------------------- /script/get-generic-sys-util/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-gh-actions-runner/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/get-gh-actions-runner/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo ${CM_RUN_CMD} 4 | eval ${CM_RUN_CMD} 5 | test $? -eq 0 || exit $? 6 | -------------------------------------------------------------------------------- /script/get-git-repo/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-git-repo/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/DevOps-automation/get-git-repo](https://docs.mlcommons.org/cm4mlops/scripts/DevOps-automation/get-git-repo) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-github-cli/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-github-cli/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/DevOps-automation/get-github-cli](https://docs.mlcommons.org/cm4mlops/scripts/DevOps-automation/get-github-cli) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-github-cli/run.bat: -------------------------------------------------------------------------------- 1 | gh.exe --version > tmp-ver.out 2 | -------------------------------------------------------------------------------- /script/get-github-cli/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | gh --version > tmp-ver.out 3 | -------------------------------------------------------------------------------- /script/get-go/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-go/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Compiler-automation/get-go](https://docs.mlcommons.org/cm4mlops/scripts/Compiler-automation/get-go) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-go/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | go version > tmp-ver.out 3 | test $? 
-eq 0 || exit 1 4 | -------------------------------------------------------------------------------- /script/get-google-saxml/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-google-saxml/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/AI-ML-frameworks/get-google-saxml](https://docs.mlcommons.org/cm4mlops/scripts/AI-ML-frameworks/get-google-saxml) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-google-saxml/run.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | 3 | echo TBD 4 | -------------------------------------------------------------------------------- /script/get-google-saxml/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "TBD" 4 | -------------------------------------------------------------------------------- /script/get-google-test/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-ipol-src/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-ipol-src/README-extra.md: -------------------------------------------------------------------------------- 1 | 20240127: Grigori added patch to support latest PIL 2 | -------------------------------------------------------------------------------- /script/get-java/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/get-java/README-extra.md: -------------------------------------------------------------------------------- 1 | # Windows 2 | 3 | ## Misc 4 | 5 | * https://jdk.java.net/java-se-ri/11 6 | * https://learn.microsoft.com/fr-fr/java/openjdk/download 7 | -------------------------------------------------------------------------------- /script/get-java/run.bat: -------------------------------------------------------------------------------- 1 | "%CM_JAVA_BIN_WITH_PATH%" -version > tmp-ver.out 2>&1 2 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 3 | 4 | -------------------------------------------------------------------------------- /script/get-java/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | ${CM_JAVA_BIN_WITH_PATH} -version &> tmp-ver.out 3 | test $? -eq 0 || exit 1 4 | -------------------------------------------------------------------------------- /script/get-javac/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-javac/README-extra.md: -------------------------------------------------------------------------------- 1 | # Windows 2 | 3 | ## Misc 4 | 5 | * https://jdk.java.net/java-se-ri/11 6 | * https://learn.microsoft.com/fr-fr/java/openjdk/download 7 | -------------------------------------------------------------------------------- /script/get-javac/run.bat: -------------------------------------------------------------------------------- 1 | "%CM_JAVAC_BIN_WITH_PATH%" -version > tmp-ver.out 2>&1 2 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 3 | 4 | -------------------------------------------------------------------------------- /script/get-javac/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | ${CM_JAVAC_BIN_WITH_PATH} -version &> tmp-ver.out 3 | test $? -eq 0 || exit 1 4 | -------------------------------------------------------------------------------- /script/get-lib-armnn/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-lib-dnnl/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-lib-protobuf/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/get-lib-qaic-api/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-lib-qaic-api/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | CUR_DIR=${PWD:-tmp} 4 | 5 | -------------------------------------------------------------------------------- /script/get-llvm/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-llvm/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Compiler-automation/get-llvm](https://docs.mlcommons.org/cm4mlops/scripts/Compiler-automation/get-llvm) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-llvm/run.bat: -------------------------------------------------------------------------------- 1 | %CM_LLVM_CLANG_BIN_WITH_PATH% --version > tmp-ver.out 2 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 3 | 4 | -------------------------------------------------------------------------------- /script/get-llvm/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | clang_bin=${CM_LLVM_CLANG_BIN_WITH_PATH} 3 | ${clang_bin} --version > tmp-ver.out 4 | test $? -eq 0 || exit 1 5 | -------------------------------------------------------------------------------- /script/get-microtvm/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-microtvm/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/TinyML-automation/get-microtvm](https://docs.mlcommons.org/cm4mlops/scripts/TinyML-automation/get-microtvm) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-ml-model-3d-unet-kits19/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-ml-model-bert-base-squad/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 
2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-ml-model-bert-large-squad/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-ml-model-dlrm-terabyte/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-ml-model-dlrm-terabyte/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | if [[ ${CM_TMP_MODEL_ADDITIONAL_NAME} ]]; then 3 | ln -s ${CM_ML_MODEL_FILE} ${CM_TMP_MODEL_ADDITIONAL_NAME} 4 | fi 5 | -------------------------------------------------------------------------------- /script/get-ml-model-efficientnet-lite/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-ml-model-gptj/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-ml-model-gptj/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/AI-ML-models/get-ml-model-gptj](https://docs.mlcommons.org/cm4mlops/scripts/AI-ML-models/get-ml-model-gptj) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-ml-model-huggingface-zoo/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-ml-model-huggingface-zoo/run.bat: -------------------------------------------------------------------------------- 1 | echo %CM_RUN_CMD% 2 | call %CM_RUN_CMD% 3 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 4 | -------------------------------------------------------------------------------- /script/get-ml-model-huggingface-zoo/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo ${CM_RUN_CMD} 3 | eval ${CM_RUN_CMD} 4 | test $? -eq 0 || exit $?
5 | -------------------------------------------------------------------------------- /script/get-ml-model-llama2/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-ml-model-mixtral/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-ml-model-mobilenet/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-ml-model-neuralmagic-zoo/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-ml-model-neuralmagic-zoo/run.bat: -------------------------------------------------------------------------------- 1 | %CM_PYTHON_BIN_WITH_PATH% %CM_TMP_CURRENT_SCRIPT_PATH%\download_sparse.py 2 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 3 | -------------------------------------------------------------------------------- /script/get-ml-model-neuralmagic-zoo/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | ${CM_PYTHON_BIN_WITH_PATH} ${CM_TMP_CURRENT_SCRIPT_PATH}/download_sparse.py 3 | -------------------------------------------------------------------------------- /script/get-ml-model-resnet50/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-ml-model-resnet50/run_config.yml: -------------------------------------------------------------------------------- 1 | docker: 2 | build: true 3 | docker_os: ubuntu 4 | docker_os_version: "22.04" 5 | 6 | run_with_default_inputs: true #if false the script won't run automatic tests 7 | -------------------------------------------------------------------------------- /script/get-ml-model-retinanet-nvidia/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/get-ml-model-retinanet/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-ml-model-rgat/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-ml-model-rnnt/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-ml-model-rnnt/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/AI-ML-models/get-ml-model-rnnt](https://docs.mlcommons.org/cm4mlops/scripts/AI-ML-models/get-ml-model-rnnt) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-ml-model-stable-diffusion/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-ml-model-tiny-resnet/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-ml-model-tiny-resnet/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo ${CM_RUN_CMD} 3 | eval ${CM_RUN_CMD} 4 | test $? -eq 0 || exit 1 5 | -------------------------------------------------------------------------------- /script/get-ml-model-tiny-resnet/run_config.yml: -------------------------------------------------------------------------------- 1 | docker: 2 | build: true 3 | docker_os: ubuntu 4 | docker_os_version: "22.04" 5 | 6 | run_with_default_inputs: true #if false the script won't run automatic tests 7 | -------------------------------------------------------------------------------- /script/get-ml-model-using-imagenet-from-model-zoo/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/get-mlperf-inference-intel-scratch-space/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-mlperf-inference-intel-scratch-space/run.bat: -------------------------------------------------------------------------------- 1 | rem native script 2 | -------------------------------------------------------------------------------- /script/get-mlperf-inference-loadgen/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-mlperf-inference-loadgen/tests/download-and-install.bat: -------------------------------------------------------------------------------- 1 | cmr "get loadgen _download" 2 | 3 | -------------------------------------------------------------------------------- /script/get-mlperf-inference-nvidia-common-code/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-mlperf-inference-nvidia-scratch-space/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-mlperf-inference-nvidia-scratch-space/README-extra.md: -------------------------------------------------------------------------------- 1 | # CM script 2 | -------------------------------------------------------------------------------- /script/get-mlperf-inference-nvidia-scratch-space/run.bat: -------------------------------------------------------------------------------- 1 | rem native script 2 | -------------------------------------------------------------------------------- /script/get-mlperf-inference-results-dir/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-mlperf-inference-results/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/get-mlperf-inference-src/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-mlperf-inference-submission-dir/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-mlperf-inference-sut-configs/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-mlperf-inference-sut-description/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-mlperf-inference-utils/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-mlperf-inference-utils/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts//get-mlperf-inference-utils](https://docs.mlcommons.org/cm4mlops/scripts//get-mlperf-inference-utils) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-mlperf-logging/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-mlperf-power-dev/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-mlperf-tiny-eembc-energy-runner-src/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/get-mlperf-tiny-src/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-mlperf-training-nvidia-code/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-mlperf-training-src/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-nvidia-docker/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-nvidia-mitten/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-nvidia-mitten/README-extra.md: -------------------------------------------------------------------------------- 1 | TBD: compile https://github.com/NVIDIA/mitten 2 | -------------------------------------------------------------------------------- /script/get-nvidia-mitten/run.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | 3 | echo TBD 4 | -------------------------------------------------------------------------------- /script/get-nvidia-mitten/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | cd ${CM_NVIDIA_MITTEN_SRC} 3 | ${CM_PYTHON_BIN_WITH_PATH} -m pip install . 4 | test $? -eq 0 || exit $? 5 | -------------------------------------------------------------------------------- /script/get-onnxruntime-prebuilt/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-openssl/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/get-openssl/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | openssl_bin=${CM_OPENSSL_BIN_WITH_PATH} 3 | ${openssl_bin} version > tmp-ver.out 2>/dev/null 4 | test $? -eq 0 || exit 1 5 | -------------------------------------------------------------------------------- /script/get-platform-details/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-platform-details/run-macos.sh: -------------------------------------------------------------------------------- 1 | echo "WARNING: get-platform-details script is fully supported on linux systems only." 2 | -------------------------------------------------------------------------------- /script/get-platform-details/run.bat: -------------------------------------------------------------------------------- 1 | echo "WARNING: get-platform-details script is fully supported on linux systems only." 2 | -------------------------------------------------------------------------------- /script/get-preprocessed-dataset-criteo/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-preprocessed-dataset-criteo/checksums.txt: -------------------------------------------------------------------------------- 1 | cdf7af87cbc7e9b468c0be46b1767601 day_23_dense.npy 2 | dd68f93301812026ed6f58dfb0757fa7 day_23_labels.npy 3 | c46b7e31ec6f2f8768fa60bdfc0f6e40 day_23_sparse_multi_hot.npz 4 | -------------------------------------------------------------------------------- /script/get-preprocessed-dataset-criteo/run-multihot.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | CUR=$PWD 4 | echo ${CM_RUN_CMD} 5 | eval ${CM_RUN_CMD} 6 | test $? -eq 0 || exit $? 7 | -------------------------------------------------------------------------------- /script/get-preprocessed-dataset-criteo/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | CUR=$PWD 4 | 5 | if [[ ${CM_CRITEO_FAKE} == "yes" ]]; then 6 | exit 0 7 | fi 8 | #${CM_PYTHON_BIN_WITH_PATH} ${CM_TMP_CURRENT_SCRIPT_PATH}/preprocess.py 9 | -------------------------------------------------------------------------------- /script/get-preprocessed-dataset-generic/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-preprocessed-dataset-imagenet/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 
2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-preprocessed-dataset-imagenet/run.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | 3 | %CM_PYTHON_BIN% %CM_TMP_CURRENT_SCRIPT_PATH%\preprocess.py 4 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 5 | -------------------------------------------------------------------------------- /script/get-preprocessed-dataset-imagenet/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | if [ ! -z ${CM_IMAGENET_PREPROCESSED_PATH+x} ]; then 3 | exit 0 4 | fi 5 | ${CM_PYTHON_BIN_WITH_PATH} ${CM_TMP_CURRENT_SCRIPT_PATH}/preprocess.py 6 | test $? -eq 0 || exit 1 7 | -------------------------------------------------------------------------------- /script/get-preprocessed-dataset-kits19/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-preprocessed-dataset-kits19/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | cmd=${CM_TMP_CMD} 4 | echo $cmd 5 | eval $cmd 6 | test $? -eq 0 || exit $? 7 | -------------------------------------------------------------------------------- /script/get-preprocessed-dataset-librispeech/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-preprocessed-dataset-librispeech/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | cmd=${CM_TMP_CMD} 4 | echo $cmd 5 | eval $cmd 6 | test $? -eq 0 || exit $? 7 | -------------------------------------------------------------------------------- /script/get-preprocessed-dataset-openimages/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-preprocessed-dataset-openimages/run.bat: -------------------------------------------------------------------------------- 1 | %CM_PYTHON_BIN_WITH_PATH% %CM_TMP_CURRENT_SCRIPT_PATH%\preprocess.py 2 | -------------------------------------------------------------------------------- /script/get-preprocessed-dataset-openimages/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ${CM_PYTHON_BIN_WITH_PATH} ${CM_TMP_CURRENT_SCRIPT_PATH}/preprocess.py 4 | -------------------------------------------------------------------------------- /script/get-preprocessed-dataset-openorca/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. 
All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-preprocessed-dataset-openorca/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | cd ${CM_RUN_DIR} 4 | echo "${CM_RUN_CMD}" 5 | eval "${CM_RUN_CMD}" 6 | -------------------------------------------------------------------------------- /script/get-preprocessed-dataset-squad/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-python3/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-python3/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Python-automation/get-python3](https://docs.mlcommons.org/cm4mlops/scripts/Python-automation/get-python3) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-python3/run.bat: -------------------------------------------------------------------------------- 1 | %CM_PYTHON_BIN_WITH_PATH% --version > tmp-ver.out 2 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 3 | -------------------------------------------------------------------------------- /script/get-qaic-apps-sdk/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-qaic-platform-sdk/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-qaic-software-kit/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-rclone-config/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/get-rclone-config/run.bat: -------------------------------------------------------------------------------- 1 | rem native script 2 | -------------------------------------------------------------------------------- /script/get-rclone/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-rclone/configs/rclone.conf: -------------------------------------------------------------------------------- 1 | [cm-team] 2 | type = drive 3 | scope = drive.readonly 4 | service_account_file = 5 | team_drive = 0AN8R_ThwUNY8Uk9PVA 6 | shared_with_me = true 7 | root_folder_id = 0AN8R_ThwUNY8Uk9PVA 8 | 9 | -------------------------------------------------------------------------------- /script/get-rclone/install-system-macos.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | brew install rclone 3 | test $? -eq 0 || exit 1 4 | -------------------------------------------------------------------------------- /script/get-rclone/install-system.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | sudo -v ; curl -k https://rclone.org/install.sh | sudo bash 3 | test $? -eq 0 || exit 1 4 | -------------------------------------------------------------------------------- /script/get-rclone/run.bat: -------------------------------------------------------------------------------- 1 | where rclone.exe > NUL 2>&1 2 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 3 | 4 | rclone --version > tmp-ver.out 5 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 6 | -------------------------------------------------------------------------------- /script/get-rclone/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo ${PATH} 4 | 5 | if ! command -v rclone &> /dev/null 6 | then 7 | echo "rclone was not detected" 8 | exit 1 9 | fi 10 | rclone --version > tmp-ver.out 11 | test $? -eq 0 || exit 1 12 | -------------------------------------------------------------------------------- /script/get-rocm-devices/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-rocm-devices/README.md: -------------------------------------------------------------------------------- 1 | Run this script 2 | ``` 3 | cm run script --tags=get,rocm-devices 4 | ``` 5 | -------------------------------------------------------------------------------- /script/get-rocm-devices/detect.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ${CM_PYTHON_BIN_WITH_PATH} ${CM_TMP_CURRENT_SCRIPT_PATH}/detect.py 4 | test $? -eq 0 || exit $? 5 | -------------------------------------------------------------------------------- /script/get-rocm/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. 
All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-rocm/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/AI-ML-frameworks/get-rocm](https://docs.mlcommons.org/cm4mlops/scripts/AI-ML-frameworks/get-rocm) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-rocm/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | dir="${CM_ROCM_BIN_WITH_PATH%/*}/../" 4 | cat ${dir}/.info/version > tmp-ver.out 5 | test $? -eq 0 || exit 1 6 | -------------------------------------------------------------------------------- /script/get-spec-ptd/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-sys-utils-cm/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-sys-utils-cm/requirements.txt: -------------------------------------------------------------------------------- 1 | requests 2 | numpy 3 | pandas 4 | wheel 5 | giturlparse 6 | -------------------------------------------------------------------------------- /script/get-sys-utils-min/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-tensorrt/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-tensorrt/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/CUDA-automation/get-tensorrt](https://docs.mlcommons.org/cm4mlops/scripts/CUDA-automation/get-tensorrt) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-terraform/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/get-terraform/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Cloud-automation/get-terraform](https://docs.mlcommons.org/cm4mlops/scripts/Cloud-automation/get-terraform) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-terraform/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | terraform --version > tmp-ver.out 3 | test $? -eq 0 || exit 1 4 | -------------------------------------------------------------------------------- /script/get-tvm-model/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-tvm-model/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/AI-ML-models/get-tvm-model](https://docs.mlcommons.org/cm4mlops/scripts/AI-ML-models/get-tvm-model) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-tvm-model/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | cmd="${CM_PYTHON_BIN_WITH_PATH} ${CM_TMP_CURRENT_SCRIPT_PATH}/process.py" 4 | 5 | echo $cmd 6 | 7 | eval $cmd 8 | -------------------------------------------------------------------------------- /script/get-tvm/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-tvm/README-extra.md: -------------------------------------------------------------------------------- 1 | ```bash 2 | cm run script "get llvm" --version=14.0.0 3 | cm run script "get tvm _llvm" --version=0.10.0 4 | cm run script "python app image-classification tvm-onnx" 5 | ``` 6 | -------------------------------------------------------------------------------- /script/get-tvm/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/AI-ML-frameworks/get-tvm](https://docs.mlcommons.org/cm4mlops/scripts/AI-ML-frameworks/get-tvm) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-xilinx-sdk/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-zendnn/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 
2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-zendnn/run.bat: -------------------------------------------------------------------------------- 1 | rem native script 2 | -------------------------------------------------------------------------------- /script/get-zendnn/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | cd ${ZENDNN_SRC_PATH} 4 | 5 | make clean 6 | test $? -eq 0 || exit $? 7 | 8 | source scripts/zendnn_build.sh gcc 9 | test $? -eq 0 || exit $? 10 | -------------------------------------------------------------------------------- /script/get-zephyr-sdk/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-zephyr-sdk/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/TinyML-automation/get-zephyr-sdk](https://docs.mlcommons.org/cm4mlops/scripts/TinyML-automation/get-zephyr-sdk) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-zephyr/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/get-zephyr/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/TinyML-automation/get-zephyr](https://docs.mlcommons.org/cm4mlops/scripts/TinyML-automation/get-zephyr) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/get-zephyr/run-ubuntu.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | sudo apt-get install -y --no-install-recommends gcc-multilib g++-multilib libsdl2-dev 4 | . ${CM_TMP_CURRENT_SCRIPT_PATH}/run.sh 5 | -------------------------------------------------------------------------------- /script/gui/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/gui/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/GUI/gui](https://docs.mlcommons.org/cm4mlops/scripts/GUI/gui) for the documentation of this CM script. 
2 | -------------------------------------------------------------------------------- /script/gui/install/redhat.md: -------------------------------------------------------------------------------- 1 | *We have successfully tested CM on Red Hat 9 and CentOS 8:* 2 | 3 | ```bash 4 | sudo dnf update 5 | sudo dnf install python3 python3-pip git wget curl 6 | 7 | ``` 8 | -------------------------------------------------------------------------------- /script/gui/run.bat: -------------------------------------------------------------------------------- 1 | streamlit run %CM_TMP_CURRENT_SCRIPT_PATH%\%CM_GUI_APP%.py %CM_GUI_EXTRA_CMD% 2 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 3 | -------------------------------------------------------------------------------- /script/gui/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | streamlit run ${CM_TMP_CURRENT_SCRIPT_PATH}/${CM_GUI_APP}.py ${CM_GUI_EXTRA_CMD} 4 | test $? -eq 0 || exit $? 5 | -------------------------------------------------------------------------------- /script/gui/tests/test.cmd: -------------------------------------------------------------------------------- 1 | cm run script --tags=gui --script="app generic mlperf inference" --prefix="gnome-terminal --" -------------------------------------------------------------------------------- /script/gui/tests/test2.cmd: -------------------------------------------------------------------------------- 1 | cm run script --tags=gui --script="app generic mlperf inference" --prefix=" " -------------------------------------------------------------------------------- /script/gui/tests/test3.cmd: -------------------------------------------------------------------------------- 1 | cm run script --tags=gui --script="run mlperf inference generate-run-cmds" --prefix="gnome-terminal --" -------------------------------------------------------------------------------- /script/gui/tests/test4.cmd: -------------------------------------------------------------------------------- 1 | cm run script --tags=gui,_graph 2 | -------------------------------------------------------------------------------- /script/gui/tests/test4a.cmd: -------------------------------------------------------------------------------- 1 | cm run script --tags=gui,_graph --exp_tags=test 2 | 3 | -------------------------------------------------------------------------------- /script/gui/tests/test4b.cmd: -------------------------------------------------------------------------------- 1 | cm run script --tags=gui,_graph --exp_name=mlperf-inference--all--datacenter--closed--image-classification--server 2 | 3 | -------------------------------------------------------------------------------- /script/gui/tests/test5.cmd: -------------------------------------------------------------------------------- 1 | cm run script "gui _playground" 2 | -------------------------------------------------------------------------------- /script/import-mlperf-inference-to-experiment/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/import-mlperf-tiny-to-experiment/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 
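The Red Hat notes near the top of this block cover only the system prerequisites. As a hedged sketch of the usual next step (commands assumed from standard CM usage rather than quoted from redhat.md), CM itself is installed with pip and this repository is then pulled:

```bash
# Hedged sketch (not part of redhat.md): install CM and fetch the cm4mlops scripts
python3 -m pip install cmind
cm pull repo mlcommons@cm4mlops
```

After that, GUI-related commands such as the test.cmd examples above (`cm run script --tags=gui ...`) become available.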
2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/import-mlperf-training-to-experiment/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-apt-package/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-apt-package/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | cmd=${CM_APT_INSTALL_CMD} 4 | echo $cmd 5 | eval $cmd 6 | test $? -eq 0 || exit $? 7 | -------------------------------------------------------------------------------- /script/install-aws-cli/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-aws-cli/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Cloud-automation/install-aws-cli](https://docs.mlcommons.org/cm4mlops/scripts/Cloud-automation/install-aws-cli) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/install-bazel/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-cmake-prebuilt/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-cuda-package-manager/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/install-cuda-package-manager/run-ubuntu.sh: -------------------------------------------------------------------------------- 1 | sudo apt-get install nvidia-cuda-toolkit 2 | -------------------------------------------------------------------------------- /script/install-cuda-package-manager/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | CUR=${PWD} 3 | echo "Package installation script not available yet for ${CM_HOST_OS_FLAVOR}" 4 | exit 1 5 | 6 | -------------------------------------------------------------------------------- /script/install-cuda-prebuilt/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-cuda-prebuilt/README-extra.md: -------------------------------------------------------------------------------- 1 | # Notes 2 | 3 | This script is in a prototyping alpha stage. Needs to be considerably updated and unified! 4 | 5 | -------------------------------------------------------------------------------- /script/install-diffusers-from-src/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-gcc-src/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-generic-conda-package/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-generic-conda-package/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | 4 | cmd="${CM_CONDA_PKG_INSTALL_CMD}" 5 | echo $cmd 6 | eval $cmd 7 | test $? -eq 0 || exit $? 8 | -------------------------------------------------------------------------------- /script/install-gflags-from-src/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-gflags/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/install-github-cli/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-github-cli/run-macos.sh: -------------------------------------------------------------------------------- 1 | brew install gh 2 | -------------------------------------------------------------------------------- /script/install-github-cli/run-rhel.sh: -------------------------------------------------------------------------------- 1 | sudo dnf install -y 'dnf-command(config-manager)' 2 | sudo dnf config-manager --add-repo https://cli.github.com/packages/rpm/gh-cli.repo 3 | sudo dnf install -y gh 4 | -------------------------------------------------------------------------------- /script/install-github-cli/run.bat: -------------------------------------------------------------------------------- 1 | choco install gh 2 | -------------------------------------------------------------------------------- /script/install-intel-neural-speed-from-src/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-ipex-from-src/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-llvm-prebuilt/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-llvm-prebuilt/run.bat: -------------------------------------------------------------------------------- 1 | echo Running %CM_LLVM_PACKAGE% ... 2 | 3 | %CM_LLVM_PACKAGE% --help 4 | -------------------------------------------------------------------------------- /script/install-llvm-src/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-mlperf-logging-from-src/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
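The install-github-cli scripts above install `gh` with Homebrew on macOS, dnf on RHEL, and Chocolatey on Windows. A quick hedged sanity check that the CLI actually landed (not part of these scripts):

```bash
# Confirm the GitHub CLI is on PATH and report its authentication state
gh --version
gh auth status
```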
4 | -------------------------------------------------------------------------------- /script/install-nccl-libs/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-nccl-libs/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts//install-nccl-libs](https://docs.mlcommons.org/cm4mlops/scripts//install-nccl-libs) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/install-nccl-libs/run-ubuntu.sh: -------------------------------------------------------------------------------- 1 | CM_SUDO=${CM_SUDO:-sudo} 2 | ${CM_SUDO} apt install -y --allow-downgrades libnccl2=2.18.3-1+cuda${CM_CUDA_VERSION} libnccl-dev=2.18.3-1+cuda${CM_CUDA_VERSION} 3 | -------------------------------------------------------------------------------- /script/install-numactl-from-src/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-onednn-from-src/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-onnxruntime-from-src/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-opencv-from-src/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-openssl/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-pip-package-for-cmind-python/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
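install-nccl-libs/run-ubuntu.sh above pins libnccl2 and libnccl-dev to `2.18.3-1+cuda${CM_CUDA_VERSION}`. If that exact build is not published for the detected CUDA version, a hedged way to see which pins the configured APT repositories actually offer (not part of the script) is:

```bash
# List the NCCL package versions known to APT so the pin can be adjusted
apt-cache madison libnccl2 libnccl-dev
```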
4 | -------------------------------------------------------------------------------- /script/install-python-src/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-python-venv/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-python-venv/run.bat: -------------------------------------------------------------------------------- 1 | %CM_PYTHON_BIN_WITH_PATH% -m pip install virtualenv 2 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 3 | 4 | %CM_PYTHON_BIN_WITH_PATH% -m venv %CM_VIRTUAL_ENV_DIR% 5 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 6 | -------------------------------------------------------------------------------- /script/install-pytorch-from-src/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-pytorch-kineto-from-src/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-qaic-compute-sdk-from-src/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-rapidjson-from-src/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-rocm/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-rocm/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/AI-ML-frameworks/install-rocm](https://docs.mlcommons.org/cm4mlops/scripts/AI-ML-frameworks/install-rocm) for the documentation of this CM script. 
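install-python-venv/run.bat above installs virtualenv with the detected Python and then creates the environment with `-m venv`. The repository's Linux counterpart is not included in this excerpt, so the following is only a hedged sketch of the equivalent POSIX flow using the same environment variables:

```bash
#!/bin/bash

# Hedged sketch of a POSIX counterpart to install-python-venv/run.bat
${CM_PYTHON_BIN_WITH_PATH} -m pip install virtualenv
test $? -eq 0 || exit $?

${CM_PYTHON_BIN_WITH_PATH} -m venv ${CM_VIRTUAL_ENV_DIR}
test $? -eq 0 || exit $?
```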
2 | -------------------------------------------------------------------------------- /script/install-rocm/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | -------------------------------------------------------------------------------- /script/install-tensorflow-for-c/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-tensorflow-from-src/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-terraform-from-src/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-tflite-from-src/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-torchvision-from-src/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-tpp-pytorch-extension/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/install-transformers-from-src/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/launch-benchmark/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/launch-benchmark/README-extra.md: -------------------------------------------------------------------------------- 1 | # CM script 2 | 3 | Universal benchmark launcher via Collective Mind 4 | -------------------------------------------------------------------------------- /script/launch-benchmark/tests/debug.py: -------------------------------------------------------------------------------- 1 | import cmind 2 | 3 | r = cmind.access({'action': 'gui', 4 | 'automation': 'script', 5 | 'artifact': 'launch benchmark'}) 6 | print(r) 7 | -------------------------------------------------------------------------------- /script/plug-prebuilt-cudnn-to-cuda/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/plug-prebuilt-cudnn-to-cuda/README-extra.md: -------------------------------------------------------------------------------- 1 | Useful info: 2 | * https://medium.com/@yushantripleseven/managing-multiple-cuda-cudnn-installations-ba9cdc5e2654 3 | -------------------------------------------------------------------------------- /script/plug-prebuilt-cusparselt-to-cuda/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/prepare-training-data-bert/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/prepare-training-data-resnet/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/preprocess-mlperf-inference-submission/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/preprocess-mlperf-inference-submission/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | cmd=${CM_RUN_CMD} 3 | echo "${cmd}" 4 | eval "${cmd}" 5 | test $? -eq 0 || exit $? 6 | -------------------------------------------------------------------------------- /script/print-any-text/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 
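launch-benchmark/tests/debug.py above drives the benchmark launcher through the cmind Python API with action `gui`, automation `script`, and artifact `launch benchmark`. Assuming the usual mapping from cmind API calls to the `cm` command line (an assumption, not a command taken from the repository), the CLI equivalent would look roughly like:

```bash
# Assumed CLI form of the cmind.access({'action': 'gui', 'automation': 'script', 'artifact': 'launch benchmark'}) call
cm gui script "launch benchmark"
```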
2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/print-any-text/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Tests/print-any-text](https://docs.mlcommons.org/cm4mlops/scripts/Tests/print-any-text) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/print-any-text/run.bat: -------------------------------------------------------------------------------- 1 | if "%CM_PRINT_ANY_TEXT%" == "" ( 2 | echo. 3 | ) else ( 4 | echo %CM_PRINT_ANY_TEXT% 5 | ) 6 | -------------------------------------------------------------------------------- /script/print-any-text/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "${CM_PRINT_ANY_TEXT}" 4 | -------------------------------------------------------------------------------- /script/print-croissant-desc/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/print-croissant-desc/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Tests/print-croissant-desc](https://docs.mlcommons.org/cm4mlops/scripts/Tests/print-croissant-desc) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/print-croissant-desc/run.bat: -------------------------------------------------------------------------------- 1 | %CM_PYTHON_BIN_WITH_PATH% %CM_TMP_CURRENT_SCRIPT_PATH%\code.py 2 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 3 | -------------------------------------------------------------------------------- /script/print-croissant-desc/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ${CM_PYTHON_BIN_WITH_PATH} ${CM_TMP_CURRENT_SCRIPT_PATH}/code.py 4 | test $? -eq 0 || exit $? 5 | -------------------------------------------------------------------------------- /script/print-hello-world-java/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/print-hello-world-java/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Tests/print-hello-world-java](https://docs.mlcommons.org/cm4mlops/scripts/Tests/print-hello-world-java) for the documentation of this CM script. 
2 | -------------------------------------------------------------------------------- /script/print-hello-world-java/run.bat: -------------------------------------------------------------------------------- 1 | echo %CM_JAVA_BIN_WITH_PATH% 2 | 3 | %CM_JAVA_BIN_WITH_PATH% %CM_TMP_CURRENT_SCRIPT_PATH%\code.java 4 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 5 | -------------------------------------------------------------------------------- /script/print-hello-world-java/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | which ${CM_JAVA_BIN_WITH_PATH} 4 | 5 | ${CM_JAVA_BIN_WITH_PATH} ${CM_TMP_CURRENT_SCRIPT_PATH}/code.java 6 | test $? -eq 0 || exit $? 7 | -------------------------------------------------------------------------------- /script/print-hello-world-javac/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/print-hello-world-javac/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Tests/print-hello-world-javac](https://docs.mlcommons.org/cm4mlops/scripts/Tests/print-hello-world-javac) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/print-hello-world-py/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/print-hello-world-py/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Tests/print-hello-world-py](https://docs.mlcommons.org/cm4mlops/scripts/Tests/print-hello-world-py) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/print-hello-world/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/print-hello-world/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Tests/print-hello-world](https://docs.mlcommons.org/cm4mlops/scripts/Tests/print-hello-world) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/print-python-version/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/print-python-version/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Tests/print-python-version](https://docs.mlcommons.org/cm4mlops/scripts/Tests/print-python-version) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/print-python-version/run.bat: -------------------------------------------------------------------------------- 1 | echo. 2 | 3 | echo CM_PYTHON_BIN = %CM_PYTHON_BIN% 4 | echo CM_PYTHON_BIN_WITH_PATH = %CM_PYTHON_BIN_WITH_PATH% 5 | 6 | echo . 7 | 8 | %CM_PYTHON_BIN_WITH_PATH% --version 9 | -------------------------------------------------------------------------------- /script/process-ae-users/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/process-ae-users/run.bat: -------------------------------------------------------------------------------- 1 | %CM_PYTHON_BIN_WITH_PATH% %CM_TMP_CURRENT_SCRIPT_PATH%\code.py 2 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 3 | -------------------------------------------------------------------------------- /script/process-ae-users/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ${CM_PYTHON_BIN_WITH_PATH} ${CM_TMP_CURRENT_SCRIPT_PATH}/code.py 4 | test $? -eq 0 || exit $? 5 | -------------------------------------------------------------------------------- /script/process-mlperf-accuracy/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/process-mlperf-accuracy/run.bat: -------------------------------------------------------------------------------- 1 | echo Running command: 2 | echo. 3 | echo %CM_RUN_CMDS% 4 | echo. 5 | 6 | %CM_RUN_CMDS% 7 | 8 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 9 | -------------------------------------------------------------------------------- /script/process-mlperf-accuracy/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | IFS="??" read -r -a cmd_array <<< "$CM_RUN_CMDS" 4 | for cmd in "${cmd_array[@]}" 5 | do 6 | echo "${cmd}" 7 | eval ${cmd} 8 | test $? -eq 0 || exit 1 9 | done 10 | -------------------------------------------------------------------------------- /script/prune-bert-models/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
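process-mlperf-accuracy/run.sh above splits CM_RUN_CMDS into an array with `IFS="??"`, which makes `read -a` treat every `?` character as a field separator, and then echoes and evaluates each resulting command. A small self-contained illustration of that splitting behaviour (the value of CM_RUN_CMDS here is hypothetical):

```bash
#!/bin/bash

# Hypothetical value: two commands joined by '?' as expected by the splitting idiom above
CM_RUN_CMDS='echo first accuracy check?echo second accuracy check'

IFS="??" read -r -a cmd_array <<< "$CM_RUN_CMDS"
for cmd in "${cmd_array[@]}"
do
  echo "${cmd}"
  eval "${cmd}"
  test $? -eq 0 || exit 1
done
```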
4 | -------------------------------------------------------------------------------- /script/prune-bert-models/README-extra.md: -------------------------------------------------------------------------------- 1 | Moved [here](https://github.com/ctuning/cm4research/blob/main/script/reproduce-neurips-paper-2022-arxiv-2204.09656/README-extra.md). 2 | -------------------------------------------------------------------------------- /script/prune-docker/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/prune-docker/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Docker-automation/prune-docker](https://docs.mlcommons.org/cm4mlops/scripts/Docker-automation/prune-docker) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/prune-docker/_cm.yaml: -------------------------------------------------------------------------------- 1 | alias: prune-docker 2 | automation_alias: script 3 | automation_uid: 5b4e0237da074764 4 | category: Docker automation 5 | tags: 6 | - prune 7 | - docker 8 | uid: 27ead88809bb4d4e 9 | -------------------------------------------------------------------------------- /script/prune-docker/run.bat: -------------------------------------------------------------------------------- 1 | docker system prune -a --volumes 2 | -------------------------------------------------------------------------------- /script/prune-docker/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | docker system prune -a --volumes 4 | -------------------------------------------------------------------------------- /script/publish-results-to-dashboard/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/publish-results-to-dashboard/run.bat: -------------------------------------------------------------------------------- 1 | %CM_PYTHON_BIN_WITH_PATH% %CM_TMP_CURRENT_SCRIPT_PATH%\code.py 2 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 3 | -------------------------------------------------------------------------------- /script/publish-results-to-dashboard/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # For now login to WANDB anonymously 4 | wandb login --anonymously --relogin 5 | 6 | ${CM_PYTHON_BIN_WITH_PATH} ${CM_TMP_CURRENT_SCRIPT_PATH}/code.py 7 | test $? -eq 0 || exit $? 8 | -------------------------------------------------------------------------------- /script/pull-git-repo/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
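prune-docker/run.sh and run.bat above call `docker system prune -a --volumes`, which asks for interactive confirmation before deleting images, containers, networks, and volumes. As an observation about the Docker CLI rather than a change present in the repository, unattended runs would typically add `--force`:

```bash
# Non-interactive variant of the prune command used by the scripts above
docker system prune -a --volumes --force
```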
4 | -------------------------------------------------------------------------------- /script/pull-git-repo/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/DevOps-automation/pull-git-repo](https://docs.mlcommons.org/cm4mlops/scripts/DevOps-automation/pull-git-repo) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/push-csv-to-spreadsheet/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/push-csv-to-spreadsheet/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ${CM_PYTHON_BIN_WITH_PATH} ${CM_TMP_CURRENT_SCRIPT_PATH}/google_api.py 4 | -------------------------------------------------------------------------------- /script/push-mlperf-inference-results-to-github/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/remote-run-commands/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/remote-run-commands/README-extra.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mlcommons/cm4mlops/e1d5c7cf04d189531dd6c22dd1176100c94608c1/script/remote-run-commands/README-extra.md -------------------------------------------------------------------------------- /script/remote-run-commands/run.bat: -------------------------------------------------------------------------------- 1 | rem native script 2 | -------------------------------------------------------------------------------- /script/remote-run-commands/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | cmd=$CM_SSH_CMD 3 | echo $cmd 4 | eval $cmd 5 | -------------------------------------------------------------------------------- /script/reproduce-ieee-acm-micro2023-paper-22/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/reproduce-ieee-acm-micro2023-paper-28/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/reproduce-ieee-acm-micro2023-paper-33/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/reproduce-ieee-acm-micro2023-paper-38/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/reproduce-ieee-acm-micro2023-paper-5/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/reproduce-ieee-acm-micro2023-paper-5/install_deps.bat: -------------------------------------------------------------------------------- 1 | rem native script 2 | 3 | echo "Windows is not supported yet" 4 | exit /b 1 5 | -------------------------------------------------------------------------------- /script/reproduce-ieee-acm-micro2023-paper-5/run.bat: -------------------------------------------------------------------------------- 1 | rem native script 2 | 3 | echo "Windows is not supported yet" 4 | exit /b 1 5 | -------------------------------------------------------------------------------- /script/reproduce-ieee-acm-micro2023-paper-8/.gitignore: -------------------------------------------------------------------------------- 1 | *.swp 2 | -------------------------------------------------------------------------------- /script/reproduce-ieee-acm-micro2023-paper-8/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/reproduce-ieee-acm-micro2023-paper-85/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/reproduce-ieee-acm-micro2023-paper-87/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/reproduce-ieee-acm-micro2023-paper-96/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 
2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/reproduce-ipol-paper-2022-439/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/reproduce-ipol-paper-2022-439/requirements.txt: -------------------------------------------------------------------------------- 1 | jupyter 2 | numpy 3 | imageio 4 | IPython 5 | scikit-image 6 | -------------------------------------------------------------------------------- /script/reproduce-mlperf-octoml-tinyml-results/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/reproduce-mlperf-training-nvidia/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/reproduce-mlperf-training-nvidia/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | if [[ ${CM_CALL_MLPERF_RUNNER} == "no" ]]; then 3 | cd ${CM_RUN_DIR} 4 | cmd=${CM_RUN_CMD} 5 | echo "${cmd}" 6 | eval "${cmd}" 7 | test $? -eq 0 || exit $? 8 | fi 9 | -------------------------------------------------------------------------------- /script/run-all-mlperf-models/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/run-docker-container/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/run-mlperf-inference-app/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/run-mlperf-inference-app/faq/ctuning-cpp-tflite.md: -------------------------------------------------------------------------------- 1 | # cTuning TFLite C++ implementation of MLPerf inference 2 | -------------------------------------------------------------------------------- /script/run-mlperf-inference-app/faq/deepsparse.md: -------------------------------------------------------------------------------- 1 | # FAQ: MLPerf inference with DeepSparse backend 2 | -------------------------------------------------------------------------------- /script/run-mlperf-inference-app/faq/intel.md: -------------------------------------------------------------------------------- 1 | # FAQ: Intel implementation of MLPerf inference 2 | -------------------------------------------------------------------------------- /script/run-mlperf-inference-app/faq/mlcommons-cpp.md: -------------------------------------------------------------------------------- 1 | # FAQ: MLCommons C++ implementation of MLPerf inference 2 | -------------------------------------------------------------------------------- /script/run-mlperf-inference-app/faq/mlcommons-python.md: -------------------------------------------------------------------------------- 1 | # MLCommons reference implementation of MLPerf inference 2 | -------------------------------------------------------------------------------- /script/run-mlperf-inference-app/faq/nvidia.md: -------------------------------------------------------------------------------- 1 | # FAQ: Nvidia implementation of MLPerf inference 2 | 3 | -------------------------------------------------------------------------------- /script/run-mlperf-inference-app/faq/qualcomm.md: -------------------------------------------------------------------------------- 1 | # FAQ: Qualcomm implementation of MLPerf inference 2 | -------------------------------------------------------------------------------- /script/run-mlperf-inference-app/modular-cm-containers/run.bat: -------------------------------------------------------------------------------- 1 | call _common.bat 2 | 3 | docker run -it %CM_DOCKER_ORG%/%CM_DOCKER_NAME%:%CM_OS_NAME%-%CM_OS_VERSION% 4 | -------------------------------------------------------------------------------- /script/run-mlperf-inference-app/modular-cm-containers/run.sh: -------------------------------------------------------------------------------- 1 | . ./_common.sh 2 | 3 | docker run -it ${CM_DOCKER_ORG}/${CM_DOCKER_NAME}:${CM_OS_NAME}-${CM_OS_VERSION} 4 | -------------------------------------------------------------------------------- /script/run-mlperf-inference-app/setup/b-deepsparse.md: -------------------------------------------------------------------------------- 1 | DeepSparse backend 2 | -------------------------------------------------------------------------------- /script/run-mlperf-inference-app/setup/i-intel.md: -------------------------------------------------------------------------------- 1 | CM can run Intel's MLPerf inference benchmark implementation either natively or inside a container. 2 | -------------------------------------------------------------------------------- /script/run-mlperf-inference-mobilenet-models/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 
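setup/i-intel.md above notes that CM can run Intel's MLPerf inference implementation either natively or inside a container. Purely as a rough, hedged illustration, with option names assumed from the run-mlperf-inference-app interface rather than taken from this file, the two modes usually differ only by a Docker flag on an otherwise identical command:

```bash
# Assumed option names (--implementation, --model, --device, --docker); adjust to the app's actual interface
cm run script --tags=run-mlperf,inference --implementation=intel --model=resnet50 --device=cpu --quiet
cm run script --tags=run-mlperf,inference --implementation=intel --model=resnet50 --device=cpu --quiet --docker
```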
2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/run-mlperf-inference-mobilenet-models/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | -------------------------------------------------------------------------------- /script/run-mlperf-inference-submission-checker/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/run-mlperf-power-client/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/run-mlperf-power-server/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/run-mlperf-power-server/run.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | 3 | echo %RUN_CMD% 4 | 5 | %RUN_CMD% 6 | 7 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 8 | -------------------------------------------------------------------------------- /script/run-mlperf-power-server/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | cmd=${RUN_CMD} 3 | echo $cmd 4 | eval $cmd 5 | test $? -eq 0 || exit $? 6 | -------------------------------------------------------------------------------- /script/run-mlperf-training-submission-checker/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/run-python/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/run-python/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Tests/run-python](https://docs.mlcommons.org/cm4mlops/scripts/Tests/run-python) for the documentation of this CM script. 
2 | -------------------------------------------------------------------------------- /script/run-python/run.bat: -------------------------------------------------------------------------------- 1 | %CM_PYTHON_BIN_WITH_PATH% %CM_RUN_PYTHON_CMD% 2 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 3 | -------------------------------------------------------------------------------- /script/run-python/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ${CM_PYTHON_BIN_WITH_PATH} ${CM_RUN_PYTHON_CMD} 4 | test $? -eq 0 || exit $? 5 | -------------------------------------------------------------------------------- /script/run-terraform/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/run-terraform/README-extra.md: -------------------------------------------------------------------------------- 1 | Please copy aws/credentials.example to aws/credentials.sh and fill in your AWS credentials. 2 | -------------------------------------------------------------------------------- /script/run-terraform/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Cloud-automation/run-terraform](https://docs.mlcommons.org/cm4mlops/scripts/Cloud-automation/run-terraform) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/run-terraform/aws/apply_credentials.sh: -------------------------------------------------------------------------------- 1 | export TF_VAR_ACCESS_KEY=$AWS_ACCESS_KEY_ID 2 | export TF_VAR_SECRET_KEY=$AWS_SECRET_ACCESS_KEY 3 | export TF_VAR_TOKEN=$AWS_SESSION_TOKEN 4 | -------------------------------------------------------------------------------- /script/run-terraform/gcp/apply_credentials.sh: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mlcommons/cm4mlops/e1d5c7cf04d189531dd6c22dd1176100c94608c1/script/run-terraform/gcp/apply_credentials.sh -------------------------------------------------------------------------------- /script/run-vllm-server/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/run-vllm-server/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo ${CM_VLLM_RUN_CMD} 4 | 5 | ${CM_VLLM_RUN_CMD} 6 | test $? -eq 0 || exit 1 7 | -------------------------------------------------------------------------------- /script/runtime-system-infos/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone.
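For the run-terraform script above, the AWS credential flow is: copy `aws/credentials.example` to `aws/credentials.sh`, fill in your keys, and let `aws/apply_credentials.sh` forward the standard AWS variables to Terraform as `TF_VAR_*` input variables. A minimal sketch is shown below; the expected contents of `credentials.sh` are an assumption (the example file itself is not reproduced in this listing), and only the `TF_VAR_*` mapping mirrors `apply_credentials.sh` as shown above.

```bash
# Hedged sketch of the run-terraform AWS credential flow.
# The shape of credentials.sh is assumed; only the TF_VAR_* mapping mirrors
# script/run-terraform/aws/apply_credentials.sh shown above.

cp aws/credentials.example aws/credentials.sh
# Edit aws/credentials.sh so that it exports the standard AWS variables
# read by apply_credentials.sh, e.g. (values are placeholders):
#   export AWS_ACCESS_KEY_ID=...
#   export AWS_SECRET_ACCESS_KEY=...
#   export AWS_SESSION_TOKEN=...   # only needed for temporary credentials

# Load the credentials and map them to Terraform input variables:
. aws/credentials.sh
. aws/apply_credentials.sh   # sets TF_VAR_ACCESS_KEY, TF_VAR_SECRET_KEY, TF_VAR_TOKEN
```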
4 | -------------------------------------------------------------------------------- /script/save-mlperf-inference-implementation-state/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/set-device-settings-qaic/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/set-echo-off-win/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/set-performance-mode/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/set-performance-mode/run.bat: -------------------------------------------------------------------------------- 1 | rem native script 2 | -------------------------------------------------------------------------------- /script/set-sqlite-dir/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/set-sqlite-dir/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/DevOps-automation/set-sqlite-dir](https://docs.mlcommons.org/cm4mlops/scripts/DevOps-automation/set-sqlite-dir) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/set-sqlite-dir/code.py: -------------------------------------------------------------------------------- 1 | import sqlite3 2 | -------------------------------------------------------------------------------- /script/set-sqlite-dir/run.bat: -------------------------------------------------------------------------------- 1 | %CM_PYTHON_BIN_WITH_PATH% %CM_TMP_CURRENT_SCRIPT_PATH%\code.py 2 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 3 | -------------------------------------------------------------------------------- /script/set-sqlite-dir/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ${CM_PYTHON_BIN_WITH_PATH} ${CM_TMP_CURRENT_SCRIPT_PATH}/code.py 4 | test $? -eq 0 || exit $? 
5 | -------------------------------------------------------------------------------- /script/set-user-limits/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/set-venv/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/set-venv/README-extra.md: -------------------------------------------------------------------------------- 1 | # Examples 2 | 3 | ```bash 4 | cmr "set venv" mlperf-test 5 | cmr "set venv" mlperf-test2 --python=/usr/bin/python3 6 | ``` 7 | -------------------------------------------------------------------------------- /script/set-venv/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts//set-venv](https://docs.mlcommons.org/cm4mlops/scripts//set-venv) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/tar-my-folder/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/tar-my-folder/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/DevOps-automation/tar-my-folder](https://docs.mlcommons.org/cm4mlops/scripts/DevOps-automation/tar-my-folder) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/test-cm-core/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/test-cm-core/README-extra.md: -------------------------------------------------------------------------------- 1 | # CM script 2 | -------------------------------------------------------------------------------- /script/test-cm-core/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Tests/test-cm-core](https://docs.mlcommons.org/cm4mlops/scripts/Tests/test-cm-core) for the documentation of this CM script. 
2 | -------------------------------------------------------------------------------- /script/test-cm-core/run.bat: -------------------------------------------------------------------------------- 1 | rem native script 2 | 3 | echo "TBD" 4 | -------------------------------------------------------------------------------- /script/test-cm-script-pipeline/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/test-cm-script-pipeline/README-extra.md: -------------------------------------------------------------------------------- 1 | # CM script 2 | 3 | This script prints the internal CM script pipeline when you run: 4 | 5 | ```bash 6 | cmr "test cm-script pipeline" 7 | ``` 8 | 9 | -------------------------------------------------------------------------------- /script/test-cm-script-pipeline/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Tests/test-cm-script-pipeline](https://docs.mlcommons.org/cm4mlops/scripts/Tests/test-cm-script-pipeline) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/test-cm-script-pipeline/run.bat: -------------------------------------------------------------------------------- 1 | rem native script 2 | 3 | echo. 4 | echo run.bat 5 | echo. 6 | -------------------------------------------------------------------------------- /script/test-cm-script-pipeline/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "" 4 | echo "run.sh" 5 | echo "" 6 | -------------------------------------------------------------------------------- /script/test-cm-script-pipeline/run2.bat: -------------------------------------------------------------------------------- 1 | rem native script 2 | 3 | echo. 4 | echo run2.bat 5 | echo. 6 | -------------------------------------------------------------------------------- /script/test-cm-script-pipeline/run2.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "" 4 | echo "run2.sh" 5 | echo "" 6 | -------------------------------------------------------------------------------- /script/test-cm-scripts/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/test-cm-scripts/run.bat: -------------------------------------------------------------------------------- 1 | rem native script 2 | -------------------------------------------------------------------------------- /script/test-debug/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone.
4 | -------------------------------------------------------------------------------- /script/test-debug/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/CM-interface-prototyping/test-debug](https://docs.mlcommons.org/cm4mlops/scripts/CM-interface-prototyping/test-debug) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/test-debug/_demo.py: -------------------------------------------------------------------------------- 1 | # Developer: Grigori Fursin 2 | 3 | import cmind 4 | import sys 5 | 6 | print(sys.executable) 7 | 8 | r = cmind.access('run script "test cm-debug"') 9 | print(r) 10 | -------------------------------------------------------------------------------- /script/test-deps-conditions/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/test-deps-conditions/README-extra.md: -------------------------------------------------------------------------------- 1 | ```bash 2 | cmr "test deps conditions" 3 | cmr "test deps conditions" --test1 4 | cmr "test deps conditions" --test1 --test2 5 | cmr "test deps conditions" --test1 --test2 --test3 6 | ``` 7 | -------------------------------------------------------------------------------- /script/test-deps-conditions/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Tests/test-deps-conditions](https://docs.mlcommons.org/cm4mlops/scripts/Tests/test-deps-conditions) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/test-deps-conditions2/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/test-deps-conditions2/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Tests/test-deps-conditions2](https://docs.mlcommons.org/cm4mlops/scripts/Tests/test-deps-conditions2) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/test-download-and-extract-artifacts/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
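The `script/test-debug/_demo.py` helper above invokes the CM Python API with `cmind.access('run script "test cm-debug"')`. Assuming the usual `cmr` shorthand for `cm run script`, the equivalent command-line invocation would look like this:

```bash
# CLI counterpart of what _demo.py does through the cmind Python API
# (cmr is the shorthand for `cm run script`):
cmr "test cm-debug"
```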
4 | -------------------------------------------------------------------------------- /script/test-download-and-extract-artifacts/README-extra.md: -------------------------------------------------------------------------------- 1 | # CM script 2 | -------------------------------------------------------------------------------- /script/test-download-and-extract-artifacts/run.bat: -------------------------------------------------------------------------------- 1 | rem native script 2 | -------------------------------------------------------------------------------- /script/test-dummy/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/test-dummy/README-extra.md: -------------------------------------------------------------------------------- 1 | # CM script 2 | -------------------------------------------------------------------------------- /script/test-dummy/_cm.yaml: -------------------------------------------------------------------------------- 1 | alias: test-dummy 2 | uid: 3ef5d69f929349bc 3 | 4 | automation_alias: script 5 | automation_uid: 5b4e0237da074764 6 | 7 | cache: false 8 | 9 | tags: 10 | - test 11 | - dummy 12 | -------------------------------------------------------------------------------- /script/test-dummy/run.bat: -------------------------------------------------------------------------------- 1 | rem native script 2 | -------------------------------------------------------------------------------- /script/test-dummy/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | -------------------------------------------------------------------------------- /script/test-mlperf-inference-retinanet/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/test-set-sys-user-cm/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/test-set-sys-user-cm/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Tests/test-set-sys-user-cm](https://docs.mlcommons.org/cm4mlops/scripts/Tests/test-set-sys-user-cm) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/truncate-mlperf-inference-accuracy-log/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 
4 | -------------------------------------------------------------------------------- /script/truncate-mlperf-inference-accuracy-log/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | cmd=${CM_RUN_CMD} 3 | echo "${cmd}" 4 | eval "${cmd}" 5 | test $? -eq 0 || exit $? 6 | -------------------------------------------------------------------------------- /script/upgrade-python-pip/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/upgrade-python-pip/README.md: -------------------------------------------------------------------------------- 1 | Please see [https://docs.mlcommons.org/cm4mlops/scripts/Tests/upgrade-python-pip](https://docs.mlcommons.org/cm4mlops/scripts/Tests/upgrade-python-pip) for the documentation of this CM script. 2 | -------------------------------------------------------------------------------- /script/upgrade-python-pip/run.bat: -------------------------------------------------------------------------------- 1 | %CM_PYTHON_BIN_WITH_PATH% -m pip install --upgrade pip 2 | IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% 3 | -------------------------------------------------------------------------------- /script/upgrade-python-pip/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ${CM_PYTHON_BIN_WITH_PATH} -m pip install --upgrade pip 4 | test $? -eq 0 || exit $? 5 | -------------------------------------------------------------------------------- /script/wrapper-reproduce-octoml-tinyml-submission/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | © 2022-2025 MLCommons. All Rights Reserved. 2 | 3 | Grigori Fursin, the cTuning foundation and OctoML donated the CK and CM projects to MLCommons to benefit everyone. 4 | -------------------------------------------------------------------------------- /script/wrapper-reproduce-octoml-tinyml-submission/run.sh: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mlcommons/cm4mlops/e1d5c7cf04d189531dd6c22dd1176100c94608c1/script/wrapper-reproduce-octoml-tinyml-submission/run.sh --------------------------------------------------------------------------------