From a015e7d2ca666aab4b1615edeb81e72852b0c803 Mon Sep 17 00:00:00 2001 From: Arjun Date: Sat, 16 Aug 2025 08:50:22 +0530 Subject: [PATCH 1/4] Support more configs for jemalloc --- script/get-lib-jemalloc/customize.py | 10 +++++++++- script/get-lib-jemalloc/meta.yaml | 9 +++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/script/get-lib-jemalloc/customize.py b/script/get-lib-jemalloc/customize.py index 0fb3b7de8..e5a8439f4 100644 --- a/script/get-lib-jemalloc/customize.py +++ b/script/get-lib-jemalloc/customize.py @@ -1,8 +1,8 @@ +from utils import * from mlc import utils import os import subprocess - def preprocess(i): env = i['env'] @@ -15,6 +15,14 @@ def preprocess(i): configure_command += f""" --with-lg-quantum={env['MLC_JEMALLOC_LG_QUANTUM']} """ if env.get('MLC_JEMALLOC_LG_PAGE', '') != '': configure_command += f""" --with-lg-page={env['MLC_JEMALLOC_LG_PAGE']} """ + + if is_true(env.get('MLC_JEMALLOC_STATS')): + configure_command += " --enable-stats " + + if is_true(env.get('MLC_JEMALLOC_PROF')): + configure_command += " --enable-prof " + + if env.get('MLC_JEMALLOC_CONFIG', '') != '': configure_command += f""" {env['MLC_JEMALLOC_CONFIG'].replace("'", "")} """ diff --git a/script/get-lib-jemalloc/meta.yaml b/script/get-lib-jemalloc/meta.yaml index a856e9e4b..10c73be91 100644 --- a/script/get-lib-jemalloc/meta.yaml +++ b/script/get-lib-jemalloc/meta.yaml @@ -65,9 +65,18 @@ variations: config.#: env: MLC_JEMALLOC_CONFIG: '#' + enable-stats: + env: + MLC_JEMALLOC_STATS: true + enable-prof: + env: + MLC_JEMALLOC_PROF: true lg-page.#: env: MLC_JEMALLOC_LG_PAGE: '#' + lg-hugepage.#: + env: + MLC_JEMALLOC_LG_PAGE: '#' lg-quantum.#: env: MLC_JEMALLOC_LG_QUANTUM: '#' From 0eb254e73fc320c890e240157acc1058d33d2096 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Sat, 16 Aug 2025 03:20:38 +0000 Subject: [PATCH 2/4] [Automated Commit] Format Codebase [skip ci] --- script/get-lib-jemalloc/customize.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/script/get-lib-jemalloc/customize.py b/script/get-lib-jemalloc/customize.py index e5a8439f4..4771bc813 100644 --- a/script/get-lib-jemalloc/customize.py +++ b/script/get-lib-jemalloc/customize.py @@ -3,6 +3,7 @@ import os import subprocess + def preprocess(i): env = i['env'] @@ -18,11 +19,10 @@ def preprocess(i): if is_true(env.get('MLC_JEMALLOC_STATS')): configure_command += " --enable-stats " - + if is_true(env.get('MLC_JEMALLOC_PROF')): configure_command += " --enable-prof " - if env.get('MLC_JEMALLOC_CONFIG', '') != '': configure_command += f""" {env['MLC_JEMALLOC_CONFIG'].replace("'", "")} """ From d03e129d24209401eb017046648920e85b38980c Mon Sep 17 00:00:00 2001 From: Arjun Suresh Date: Sun, 17 Aug 2025 22:16:09 +0530 Subject: [PATCH 3/4] Fixes for help, support --all in search, update some outdated READMEs --- automation/script/help.py | 29 ++- automation/script/module.py | 2 + .../README.md | 10 +- script/app-mlperf-inference-nvidia/README.md | 212 +++++++++++++++++- script/app-mlperf-inference/README.md | 10 +- .../README.md | 6 +- script/download-file/README.md | 8 +- .../README.md | 6 +- script/get-cache-dir/README.md | 6 +- script/get-generic-sys-util/README.md | 7 +- script/get-git-repo/README.md | 6 +- script/get-ipol-src/meta.yaml | 6 +- script/get-lib-jemalloc/README.md | 49 +++- .../get-ml-model-abtf-ssd-pytorch/README.md | 6 +- script/get-ml-model-deeplabv3_plus/README.md | 6 +- script/get-ml-model-resnet50/README.md | 6 +- script/get-oneapi/README.md | 43 +++- 
script/get-spec-ptd/meta.yaml | 3 +- script/get-tensorrt/meta.yaml | 7 +- .../README.md | 10 +- script/submit-mlperf-results/README.md | 6 +- 21 files changed, 379 insertions(+), 65 deletions(-) diff --git a/automation/script/help.py b/automation/script/help.py index 9c2212931..f4d9ab3e5 100644 --- a/automation/script/help.py +++ b/automation/script/help.py @@ -32,20 +32,25 @@ def display_help(self_module, input_params): scripts_list = search_result['list'] if not scripts_list: - return {'return': 1, 'error': 'No scripts were found'} - generic_inputs = self_module.input_flags_converted_to_env + print("") + print("Please use script tags or alias/uid to get help for a specific script") + print("") + print("Generic Inputs for all Scripts:") + print("") + print_input_descriptions(generic_inputs) - # Step 4: Iterate over scripts and generate help output - for script in sorted(scripts_list, key=lambda x: x.meta.get('alias', '')): - metadata = script.meta - script_path = script.path - print_script_help( - metadata, - script_path, - generic_inputs, - env, - self_module) + else: + # Step 4: Iterate over scripts and generate help output + for script in sorted(scripts_list, key=lambda x: x.meta.get('alias', '')): + metadata = script.meta + script_path = script.path + print_script_help( + metadata, + script_path, + generic_inputs, + env, + self_module) return {'return': 0} diff --git a/automation/script/module.py b/automation/script/module.py index 827b7f8cc..298a6e2bc 100644 --- a/automation/script/module.py +++ b/automation/script/module.py @@ -2870,6 +2870,8 @@ def search(self, i): if i.get(key): ii[key] = i[key] + if i.get('all'): + ii['all'] = i['all'] r = super(ScriptAutomation, self).search(ii) if r['return'] > 0: return r diff --git a/script/app-mlperf-inference-mlcommons-python/README.md b/script/app-mlperf-inference-mlcommons-python/README.md index adefcc231..58bf4738e 100644 --- a/script/app-mlperf-inference-mlcommons-python/README.md +++ b/script/app-mlperf-inference-mlcommons-python/README.md @@ -1,10 +1,10 @@ # README for app-mlperf-inference-mlcommons-python This README is automatically generated. Add custom content in [info.md](info.md). Please follow the [script execution document](https://docs.mlcommons.org/mlcflow/targets/script/execution-flow/) to understand more about the MLC script execution. -`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/user`, you can do +`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/$USER`, you can do ``` -mkdir /mnt/user/MLC -ln -s /mnt/user/MLC $HOME/MLC +mkdir /mnt/$USER/MLC +ln -s /mnt/$USER/MLC $HOME/MLC ``` You can also use the `ENV` variable `MLC_REPOS` to control this location but this will need a set after every system reboot. 
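Since `MLC_REPOS` has to be re-exported after every reboot, a minimal sketch of making it persistent (assuming a bash login shell; `/mnt/$USER/MLC` is just the example location from above):

```bash
# Keep MLC data on the larger mount and persist the setting across reboots
mkdir -p /mnt/$USER/MLC
echo "export MLC_REPOS=/mnt/$USER/MLC" >> ~/.bashrc
source ~/.bashrc
```

The symlink approach shown above achieves the same thing without any shell configuration.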
@@ -136,7 +136,7 @@ mlcr app,vision,language,mlcommons,mlperf,inference,reference,ref - `llama2-70b-99` (base: llama2-70b_) - `llama2-70b-99.9` (base: llama2-70b_) - `llama3_1-405b` -- `llama3_1-8b` +- `llama3_1-8b_` - `mixtral-8x7b` - `pointpainting` - `resnet50` (default) @@ -166,6 +166,8 @@ mlcr app,vision,language,mlcommons,mlperf,inference,reference,ref - `dlrm-v2_` - `gptj_` - `llama2-70b_` +- `llama3_1-8b` (base: llama3_1-8b_) +- `llama3_1-8b-edge` (base: llama3_1-8b_) - `multistream` - `offline` - `r2.1_default` diff --git a/script/app-mlperf-inference-nvidia/README.md b/script/app-mlperf-inference-nvidia/README.md index 9075672d8..261cf91ca 100644 --- a/script/app-mlperf-inference-nvidia/README.md +++ b/script/app-mlperf-inference-nvidia/README.md @@ -1,13 +1,96 @@ # README for app-mlperf-inference-nvidia -This README is automatically generated. Add custom content in [info.txt](info.txt). Please follow the [script execution document](https://docs.mlcommons.org/mlcflow/targets/script/execution-flow/) to understand more about the MLC script execution. +This README is automatically generated. Add custom content in [info.md](info.md). Please follow the [script execution document](https://docs.mlcommons.org/mlcflow/targets/script/execution-flow/) to understand more about the MLC script execution. +`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/$USER`, you can do +``` +mkdir /mnt/$USER/MLC +ln -s /mnt/$USER/MLC $HOME/MLC +``` +You can also use the `ENV` variable `MLC_REPOS` to control this location but this will need a set after every system reboot. + +## Setup + +If you are not on a Python development environment please refer to the [official docs](https://docs.mlcommons.org/mlcflow/install/) for the installation. + +```bash +python3 -m venv mlcflow +. mlcflow/bin/activate +pip install mlcflow +``` + +- Using a virtual environment is recommended (per `pip` best practices), but you may skip it or use `--break-system-packages` if needed. + +### Pull mlperf-automations + +Once `mlcflow` is installed: + +```bash +mlc pull repo mlcommons@mlperf-automations --pat= +``` +- `--pat` or `--ssh` is only needed if the repo is PRIVATE +- If `--pat` is avoided, you'll be asked to enter the password where you can enter your Private Access Token +- `--ssh` option can be used instead of `--pat=<>` option if you prefer to use SSH for accessing the github repository. 
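A hedged sketch of the SSH alternative mentioned in the last bullet (it assumes an SSH key is already registered with your GitHub account):

```bash
# Pull the automations repo over SSH instead of passing a personal access token
mlc pull repo mlcommons@mlperf-automations --ssh
```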
## Run Commands ```bash mlcr reproduce,mlcommons,mlperf,inference,harness,nvidia-harness,nvidia ``` -No script specific inputs +### Script Inputs + +| Name | Description | Choices | Default | +|------|-------------|---------|------| +| `--count` | | | `` | +| `--max_batchsize` | | | `` | +| `--mlperf_conf` | | | `` | +| `--mode` | | | `performance` | +| `--output_dir` | | | `` | +| `--scenario` | | | `Offline` | +| `--user_conf` | | | `` | +| `--devices` | | | `` | +| `--skip_preprocess` | | | `no` | +| `--skip_preprocessing` | Alias for skip_preprocess | | `` | +| `--target_qps` | | | `` | +| `--offline_target_qps` | | | `` | +| `--server_target_qps` | | | `` | +| `--target_latency` | | | `` | +| `--singlestream_target_latency` | | | `` | +| `--multistream_target_latency` | | | `` | +| `--use_triton` | | | `` | +| `--gpu_copy_streams` | | | `` | +| `--gpu_inference_streams` | | | `` | +| `--gpu_batch_size` | | | `` | +| `--dla_copy_streams` | | | `` | +| `--dla_inference_streams` | | | `` | +| `--dla_batch_size` | | | `` | +| `--input_format` | | | `` | +| `--performance_sample_count` | | | `` | +| `--workspace_size` | | | `` | +| `--log_dir` | | | `` | +| `--use_graphs` | | | `` | +| `--run_infer_on_copy_streams` | | | `` | +| `--start_from_device` | | | `` | +| `--end_on_device` | | | `` | +| `--max_dlas` | | | `` | +| `--power_setting` | | | `` | +| `--make_cmd` | | | `` | +| `--rerun` | | | `` | +| `--extra_run_options` | | | `` | +| `--use_deque_limit` | | | `` | +| `--deque_timeout_usec` | | | `` | +| `--use_cuda_thread_per_device` | | | `` | +| `--num_warmups` | | | `` | +| `--graphs_max_seqlen` | | | `` | +| `--num_issue_query_threads` | | | `` | +| `--soft_drop` | | | `` | +| `--use_small_tile_gemm_plugin` | | | `` | +| `--audio_buffer_num_lines` | | | `` | +| `--use_fp8` | | | `` | +| `--enable_sort` | | | `` | +| `--num_sort_segments` | | | `` | +| `--skip_postprocess` | | | `` | +| `--embedding_weights_on_gpu_part` | | | `` | +| `--sdxl_batcher_time_limit` | | | `` | ### Generic Script Inputs | Name | Description | Choices | Default | @@ -26,3 +109,128 @@ No script specific inputs | `--gh_token` | Github Token | | `` | | `--hf_token` | Huggingface Token | | `` | | `--verify_ssl` | Verify SSL | | `False` | +## Variations + +### Backend + +- `tensorrt` (default) + +### Batch-size + +- `batch_size.#` _(# can be substituted dynamically)_ + +### Batchsize-format-change + +- `pre5.0` +- `v5.0+` + +### Build-engine-options + +- `build_engine_options.#` _(# can be substituted dynamically)_ + +### Device + +- `cpu` +- `cuda` (default) + +### Device-memory + +- `gpu_memory.#` _(# can be substituted dynamically)_ +- `gpu_memory.16` +- `gpu_memory.24` +- `gpu_memory.32` +- `gpu_memory.40` +- `gpu_memory.48` +- `gpu_memory.8` +- `gpu_memory.80` + +### Dla-batch-size + +- `dla_batch_size.#` _(# can be substituted dynamically)_ + +### Gpu-connection + +- `pcie` +- `sxm` + +### Gpu-name + +- `a100` +- `a6000` +- `custom` +- `l4` +- `orin` +- `rtx_4090` +- `rtx_6000_ada` +- `t4` + +### Graphs + +- `use-graphs` + +### Loadgen-scenario + +- `multistream` +- `offline` +- `server` +- `singlestream` + +### Model + +- `3d-unet-99` (base: 3d-unet_) +- `3d-unet-99.9` (base: 3d-unet_) +- `bert-99` (base: bert_) +- `bert-99.9` (base: bert_) +- `dlrm-v2-99` (base: dlrm_) +- `dlrm-v2-99.9` (base: dlrm_) +- `gptj-99` (base: gptj_) +- `gptj-99.9` (base: gptj_) +- `llama2-70b-99` (base: llama2-70b_) +- `llama2-70b-99.9` (base: llama2-70b_) +- `resnet50` (default) +- `retinanet` +- `rnnt` +- `sdxl` + +### 
Num-gpus + +- `num-gpus.#` _(# can be substituted dynamically)_ +- `num-gpus.1` (default) + +### Power-mode + +- `maxn` +- `maxq` + +### Run-mode + +- `build` +- `build_engine` (alias: build-engine) +- `calibrate` +- `download_model` +- `prebuild` +- `preprocess_data` (alias: preprocess-data) +- `run_harness` (default) + +### Triton + +- `use_triton` + +### Ungrouped + +- `3d-unet_` +- `bert_` +- `default_variations` +- `dlrm_` +- `env` +- `gptj_` +- `llama2-70b_` +- `run-harness` +- `v3.1` (base: pre5.0) + +### Version + +- `v4.0` (base: pre5.0) +- `v4.1` (base: pre5.0) +- `v4.1-dev` (base: pre5.0) (default) +- `v5.0` (base: v5.0+) diff --git a/script/app-mlperf-inference/README.md b/script/app-mlperf-inference/README.md index 6a5c63c29..6f8c726c9 100644 --- a/script/app-mlperf-inference/README.md +++ b/script/app-mlperf-inference/README.md @@ -1,10 +1,10 @@ # README for app-mlperf-inference This README is automatically generated. Add custom content in [info.md](info.md). Please follow the [script execution document](https://docs.mlcommons.org/mlcflow/targets/script/execution-flow/) to understand more about the MLC script execution. -`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/user`, you can do +`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/$USER`, you can do ``` -mkdir /mnt/user/MLC -ln -s /mnt/user/MLC $HOME/MLC +mkdir /mnt/$USER/MLC +ln -s /mnt/$USER/MLC $HOME/MLC ``` You can also use the `ENV` variable `MLC_REPOS` to control this location but this will need a set after every system reboot. @@ -166,7 +166,7 @@ mlcr app,vision,language,mlcommons,mlperf,inference,generic - `llama2-70b-99` (base: llama2-70b_) - `llama2-70b-99.9` (base: llama2-70b_) - `llama3_1-405b` -- `llama3_1-8b` +- `llama3_1-8b_` - `mixtral-8x7b` (base: mixtral-8x7b) - `mobilenet` - `pointpainting` @@ -207,4 +207,6 @@ mlcr app,vision,language,mlcommons,mlperf,inference,generic - `dlrm_` - `gptj_` (alias: gptj) - `llama2-70b_` +- `llama3_1-8b` (base: llama3_1-8b_) +- `llama3_1-8b-edge` (base: llama3_1-8b_) - `power` diff --git a/script/build-mlperf-inference-server-nvidia/README.md b/script/build-mlperf-inference-server-nvidia/README.md index 280d286b0..0714b29f1 100644 --- a/script/build-mlperf-inference-server-nvidia/README.md +++ b/script/build-mlperf-inference-server-nvidia/README.md @@ -1,10 +1,10 @@ # README for build-mlperf-inference-server-nvidia This README is automatically generated. Add custom content in [info.md](info.md). Please follow the [script execution document](https://docs.mlcommons.org/mlcflow/targets/script/execution-flow/) to understand more about the MLC script execution. -`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/user`, you can do +`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/$USER`, you can do ``` -mkdir /mnt/user/MLC -ln -s /mnt/user/MLC $HOME/MLC +mkdir /mnt/$USER/MLC +ln -s /mnt/$USER/MLC $HOME/MLC ``` You can also use the `ENV` variable `MLC_REPOS` to control this location but this will need a set after every system reboot. 
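Putting the `app-mlperf-inference-nvidia` inputs and variations listed above together, a hypothetical invocation could look like the following; the model, device, and batch-size values are illustrative only, and the comma-separated `_variation` tag syntax is assumed:

```bash
# Run the harness for ResNet50 on CUDA/TensorRT in the Offline scenario;
# --gpu_batch_size and --gpu_copy_streams come from the script-inputs table above
mlcr reproduce,mlcommons,mlperf,inference,harness,nvidia-harness,nvidia,_resnet50,_cuda,_tensorrt,_run_harness \
    --scenario=Offline --mode=performance --gpu_batch_size=64 --gpu_copy_streams=2
```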
diff --git a/script/download-file/README.md b/script/download-file/README.md index 5caa972b5..dcc6f866c 100644 --- a/script/download-file/README.md +++ b/script/download-file/README.md @@ -1,10 +1,10 @@ # README for download-file This README is automatically generated. Add custom content in [info.md](info.md). Please follow the [script execution document](https://docs.mlcommons.org/mlcflow/targets/script/execution-flow/) to understand more about the MLC script execution. -`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/user`, you can do +`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/$USER`, you can do ``` -mkdir /mnt/user/MLC -ln -s /mnt/user/MLC $HOME/MLC +mkdir /mnt/$USER/MLC +ln -s /mnt/$USER/MLC $HOME/MLC ``` You can also use the `ENV` variable `MLC_REPOS` to control this location but this will need a set after every system reboot. @@ -72,7 +72,7 @@ mlcr download file - `curl` - `gdown` - `mlcutil` (alias: cmutil) (default) -- `r2_downloader` +- `r2-downloader` - `rclone` - `wget` diff --git a/script/generate-mlperf-inference-submission/README.md b/script/generate-mlperf-inference-submission/README.md index 416a01828..603dc5d8d 100644 --- a/script/generate-mlperf-inference-submission/README.md +++ b/script/generate-mlperf-inference-submission/README.md @@ -1,10 +1,10 @@ # README for generate-mlperf-inference-submission This README is automatically generated. Add custom content in [info.md](info.md). Please follow the [script execution document](https://docs.mlcommons.org/mlcflow/targets/script/execution-flow/) to understand more about the MLC script execution. -`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/user`, you can do +`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/$USER`, you can do ``` -mkdir /mnt/user/MLC -ln -s /mnt/user/MLC $HOME/MLC +mkdir /mnt/$USER/MLC +ln -s /mnt/$USER/MLC $HOME/MLC ``` You can also use the `ENV` variable `MLC_REPOS` to control this location but this will need a set after every system reboot. diff --git a/script/get-cache-dir/README.md b/script/get-cache-dir/README.md index 172b85eec..b9f650517 100644 --- a/script/get-cache-dir/README.md +++ b/script/get-cache-dir/README.md @@ -1,10 +1,10 @@ # README for get-cache-dir This README is automatically generated. Add custom content in [info.md](info.md). Please follow the [script execution document](https://docs.mlcommons.org/mlcflow/targets/script/execution-flow/) to understand more about the MLC script execution. -`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/user`, you can do +`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/$USER`, you can do ``` -mkdir /mnt/user/MLC -ln -s /mnt/user/MLC $HOME/MLC +mkdir /mnt/$USER/MLC +ln -s /mnt/$USER/MLC $HOME/MLC ``` You can also use the `ENV` variable `MLC_REPOS` to control this location but this will need a set after every system reboot. 
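For the `download-file` script above, note that the R2 downloader variation is now spelled `r2-downloader` (previously `r2_downloader`). Selecting a specific tool is a matter of adding the variation tag; a sketch, assuming the comma-separated tag form used by the other scripts:

```bash
# Use the renamed R2 downloader variation instead of the default mlcutil tool
mlcr download,file,_r2-downloader
```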
diff --git a/script/get-generic-sys-util/README.md b/script/get-generic-sys-util/README.md index ec30d7828..45892d2fb 100644 --- a/script/get-generic-sys-util/README.md +++ b/script/get-generic-sys-util/README.md @@ -1,10 +1,10 @@ # README for get-generic-sys-util This README is automatically generated. Add custom content in [info.md](info.md). Please follow the [script execution document](https://docs.mlcommons.org/mlcflow/targets/script/execution-flow/) to understand more about the MLC script execution. -`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/user`, you can do +`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/$USER`, you can do ``` -mkdir /mnt/user/MLC -ln -s /mnt/user/MLC $HOME/MLC +mkdir /mnt/$USER/MLC +ln -s /mnt/$USER/MLC $HOME/MLC ``` You can also use the `ENV` variable `MLC_REPOS` to control this location but this will need a set after every system reboot. @@ -72,6 +72,7 @@ mlcr get,sys-util,generic,generic-sys-util - `autoconf` - `bzip2` - `cmake` +- `crossbuild-essential-arm64` - `dmidecode` - `ffmpeg` - `flex` diff --git a/script/get-git-repo/README.md b/script/get-git-repo/README.md index 9c68729c1..f79fe3b3a 100644 --- a/script/get-git-repo/README.md +++ b/script/get-git-repo/README.md @@ -1,10 +1,10 @@ # README for get-git-repo This README is automatically generated. Add custom content in [info.md](info.md). Please follow the [script execution document](https://docs.mlcommons.org/mlcflow/targets/script/execution-flow/) to understand more about the MLC script execution. -`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/user`, you can do +`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/$USER`, you can do ``` -mkdir /mnt/user/MLC -ln -s /mnt/user/MLC $HOME/MLC +mkdir /mnt/$USER/MLC +ln -s /mnt/$USER/MLC $HOME/MLC ``` You can also use the `ENV` variable `MLC_REPOS` to control this location but this will need a set after every system reboot. diff --git a/script/get-ipol-src/meta.yaml b/script/get-ipol-src/meta.yaml index ac64d8971..92e388799 100644 --- a/script/get-ipol-src/meta.yaml +++ b/script/get-ipol-src/meta.yaml @@ -13,8 +13,10 @@ extra_cache_tags_from_env: - env: MLC_IPOL_YEAR prefix: year- input_description: - number: IPOL publication number - year: IPOL publication year + number: + desc: IPOL publication number + year: + desc: IPOL publication year input_mapping: number: MLC_IPOL_NUMBER year: MLC_IPOL_YEAR diff --git a/script/get-lib-jemalloc/README.md b/script/get-lib-jemalloc/README.md index f61ba4247..f6b90d0f5 100644 --- a/script/get-lib-jemalloc/README.md +++ b/script/get-lib-jemalloc/README.md @@ -1,6 +1,35 @@ # README for get-lib-jemalloc -This README is automatically generated. Add custom content in [info.txt](info.txt). Please follow the [script execution document](https://docs.mlcommons.org/mlcflow/targets/script/execution-flow/) to understand more about the MLC script execution. +This README is automatically generated. Add custom content in [info.md](info.md). Please follow the [script execution document](https://docs.mlcommons.org/mlcflow/targets/script/execution-flow/) to understand more about the MLC script execution. 
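Circling back to the `get-generic-sys-util` change above, the newly listed `crossbuild-essential-arm64` utility can be requested as a variation on the documented run command (a sketch, assuming the usual `_variation` tag form):

```bash
# Install the arm64 cross-compilation toolchain via the new sys-util variation
mlcr get,sys-util,generic,generic-sys-util,_crossbuild-essential-arm64
```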
+`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/$USER`, you can do +``` +mkdir /mnt/$USER/MLC +ln -s /mnt/$USER/MLC $HOME/MLC +``` +You can also use the `ENV` variable `MLC_REPOS` to control this location but this will need a set after every system reboot. + +## Setup + +If you are not on a Python development environment please refer to the [official docs](https://docs.mlcommons.org/mlcflow/install/) for the installation. + +```bash +python3 -m venv mlcflow +. mlcflow/bin/activate +pip install mlcflow +``` + +- Using a virtual environment is recommended (per `pip` best practices), but you may skip it or use `--break-system-packages` if needed. + +### Pull mlperf-automations + +Once `mlcflow` is installed: + +```bash +mlc pull repo mlcommons@mlperf-automations --pat= +``` +- `--pat` or `--ssh` is only needed if the repo is PRIVATE +- If `--pat` is avoided, you'll be asked to enter the password where you can enter your Private Access Token +- `--ssh` option can be used instead of `--pat=<>` option if you prefer to use SSH for accessing the github repository. ## Run Commands ```bash @@ -26,3 +55,21 @@ No script specific inputs | `--gh_token` | Github Token | | `` | | `--hf_token` | Huggingface Token | | `` | | `--verify_ssl` | Verify SSL | | `False` | +## Variations + +### Ungrouped + +- `branch.#` _(# can be substituted dynamically)_ +- `config.#` _(# can be substituted dynamically)_ +- `enable-prof` +- `enable-stats` +- `lg-hugepage.#` _(# can be substituted dynamically)_ +- `lg-page.#` _(# can be substituted dynamically)_ +- `lg-quantum.#` _(# can be substituted dynamically)_ +- `sha.#` _(# can be substituted dynamically)_ +- `version.official` (base: url.official) + +### Version + +- `url.#` _(# can be substituted dynamically)_ +- `url.official` (default) diff --git a/script/get-ml-model-abtf-ssd-pytorch/README.md b/script/get-ml-model-abtf-ssd-pytorch/README.md index 3ef5cbbdc..6a6e5d631 100644 --- a/script/get-ml-model-abtf-ssd-pytorch/README.md +++ b/script/get-ml-model-abtf-ssd-pytorch/README.md @@ -1,10 +1,10 @@ # README for get-ml-model-abtf-ssd-pytorch This README is automatically generated. Add custom content in [info.md](info.md). Please follow the [script execution document](https://docs.mlcommons.org/mlcflow/targets/script/execution-flow/) to understand more about the MLC script execution. -`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/user`, you can do +`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/$USER`, you can do ``` -mkdir /mnt/user/MLC -ln -s /mnt/user/MLC $HOME/MLC +mkdir /mnt/$USER/MLC +ln -s /mnt/$USER/MLC $HOME/MLC ``` You can also use the `ENV` variable `MLC_REPOS` to control this location but this will need a set after every system reboot. diff --git a/script/get-ml-model-deeplabv3_plus/README.md b/script/get-ml-model-deeplabv3_plus/README.md index 90f6525bc..438f35648 100644 --- a/script/get-ml-model-deeplabv3_plus/README.md +++ b/script/get-ml-model-deeplabv3_plus/README.md @@ -1,10 +1,10 @@ # README for get-ml-model-deeplabv3-plus This README is automatically generated. Add custom content in [info.md](info.md). 
Please follow the [script execution document](https://docs.mlcommons.org/mlcflow/targets/script/execution-flow/) to understand more about the MLC script execution. -`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/user`, you can do +`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/$USER`, you can do ``` -mkdir /mnt/user/MLC -ln -s /mnt/user/MLC $HOME/MLC +mkdir /mnt/$USER/MLC +ln -s /mnt/$USER/MLC $HOME/MLC ``` You can also use the `ENV` variable `MLC_REPOS` to control this location but this will need a set after every system reboot. diff --git a/script/get-ml-model-resnet50/README.md b/script/get-ml-model-resnet50/README.md index d6e3dd61f..13012c78b 100644 --- a/script/get-ml-model-resnet50/README.md +++ b/script/get-ml-model-resnet50/README.md @@ -1,10 +1,10 @@ # README for get-ml-model-resnet50 This README is automatically generated. Add custom content in [info.md](info.md). Please follow the [script execution document](https://docs.mlcommons.org/mlcflow/targets/script/execution-flow/) to understand more about the MLC script execution. -`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/user`, you can do +`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/$USER`, you can do ``` -mkdir /mnt/user/MLC -ln -s /mnt/user/MLC $HOME/MLC +mkdir /mnt/$USER/MLC +ln -s /mnt/$USER/MLC $HOME/MLC ``` You can also use the `ENV` variable `MLC_REPOS` to control this location but this will need a set after every system reboot. diff --git a/script/get-oneapi/README.md b/script/get-oneapi/README.md index 7d8dedc9c..84cc32210 100644 --- a/script/get-oneapi/README.md +++ b/script/get-oneapi/README.md @@ -1,13 +1,46 @@ # README for get-one-api -This README is automatically generated. Add custom content in [info.txt](info.txt). Please follow the [script execution document](https://docs.mlcommons.org/mlcflow/targets/script/execution-flow/) to understand more about the MLC script execution. +This README is automatically generated. Add custom content in [info.md](info.md). Please follow the [script execution document](https://docs.mlcommons.org/mlcflow/targets/script/execution-flow/) to understand more about the MLC script execution. +`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/$USER`, you can do +``` +mkdir /mnt/$USER/MLC +ln -s /mnt/$USER/MLC $HOME/MLC +``` +You can also use the `ENV` variable `MLC_REPOS` to control this location but this will need a set after every system reboot. + +## Setup + +If you are not on a Python development environment please refer to the [official docs](https://docs.mlcommons.org/mlcflow/install/) for the installation. + +```bash +python3 -m venv mlcflow +. mlcflow/bin/activate +pip install mlcflow +``` + +- Using a virtual environment is recommended (per `pip` best practices), but you may skip it or use `--break-system-packages` if needed. 
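If you do skip the virtual environment on a distribution whose Python is marked externally managed (PEP 668), the flag mentioned above is passed straight to `pip`:

```bash
# Only needed when pip refuses to install into the system environment
pip install mlcflow --break-system-packages
```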
+ +### Pull mlperf-automations + +Once `mlcflow` is installed: + +```bash +mlc pull repo mlcommons@mlperf-automations --pat= +``` +- `--pat` or `--ssh` is only needed if the repo is PRIVATE +- If `--pat` is avoided, you'll be asked to enter the password where you can enter your Private Access Token +- `--ssh` option can be used instead of `--pat=<>` option if you prefer to use SSH for accessing the github repository. ## Run Commands ```bash mlcr get,oneapi,compiler,get-oneapi ``` -No script specific inputs +### Script Inputs + +| Name | Description | Choices | Default | +|------|-------------|---------|------| +| `--oneapi_dir` | | | `` | ### Generic Script Inputs | Name | Description | Choices | Default | @@ -26,3 +59,9 @@ No script specific inputs | `--gh_token` | Github Token | | `` | | `--hf_token` | Huggingface Token | | `` | | `--verify_ssl` | Verify SSL | | `False` | +## Variations + +### Ungrouped + +- `fortran` +- `path.#` _(# can be substituted dynamically)_ diff --git a/script/get-spec-ptd/meta.yaml b/script/get-spec-ptd/meta.yaml index f642f7053..8c6087600 100644 --- a/script/get-spec-ptd/meta.yaml +++ b/script/get-spec-ptd/meta.yaml @@ -24,7 +24,8 @@ deps: - MLC_GIT_* tags: get,git,repo,_repo.https://github.com/mlcommons/power input_description: - input: Path to SPEC PTDaemon (Optional) + input: + desc: Path to SPEC PTDaemon (Optional) input_mapping: input: MLC_INPUT new_env_keys: diff --git a/script/get-tensorrt/meta.yaml b/script/get-tensorrt/meta.yaml index 5370e37d4..1263e4158 100644 --- a/script/get-tensorrt/meta.yaml +++ b/script/get-tensorrt/meta.yaml @@ -13,9 +13,10 @@ deps: tags: get,python3 docker: {} input_description: - input: Full path to the installed TensorRT library (nvinfer) - tar_file: Full path to the TensorRT Tar file downloaded from the Nvidia website - (https://developer.nvidia.com/tensorrt) + input: + desc: Full path to the installed TensorRT library (nvinfer) + tar_file: + desc: Full path to the TensorRT Tar file downloaded from the Nvidia website (https://developer.nvidia.com/tensorrt) input_mapping: input: MLC_INPUT tar_file: MLC_TENSORRT_TAR_FILE_PATH diff --git a/script/run-mlperf-inference-submission-checker/README.md b/script/run-mlperf-inference-submission-checker/README.md index 626aa9fcd..ce18e987b 100644 --- a/script/run-mlperf-inference-submission-checker/README.md +++ b/script/run-mlperf-inference-submission-checker/README.md @@ -1,10 +1,10 @@ # README for run-mlperf-inference-submission-checker This README is automatically generated. Add custom content in [info.md](info.md). Please follow the [script execution document](https://docs.mlcommons.org/mlcflow/targets/script/execution-flow/) to understand more about the MLC script execution. -`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/user`, you can do +`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/$USER`, you can do ``` -mkdir /mnt/user/MLC -ln -s /mnt/user/MLC $HOME/MLC +mkdir /mnt/$USER/MLC +ln -s /mnt/$USER/MLC $HOME/MLC ``` You can also use the `ENV` variable `MLC_REPOS` to control this location but this will need a set after every system reboot. 
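For the updated `get-oneapi` script above, the newly documented `--oneapi_dir` input and the `fortran` variation can be combined roughly as follows; the installation path is illustrative, not a required location:

```bash
# Detect an existing oneAPI toolchain, including the Fortran compiler,
# from a user-provided installation directory
mlcr get,oneapi,compiler,get-oneapi,_fortran --oneapi_dir=/opt/intel/oneapi
```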
@@ -90,3 +90,7 @@ mlcr run,mlc,mlcommons,mlperf,inference,mlperf-inference,submission,checker,subm ### Ungrouped - `short-run` + +### Version + +- `version.master` diff --git a/script/submit-mlperf-results/README.md b/script/submit-mlperf-results/README.md index 6a8108ee0..f1909fca1 100644 --- a/script/submit-mlperf-results/README.md +++ b/script/submit-mlperf-results/README.md @@ -1,10 +1,10 @@ # README for submit-mlperf-results This README is automatically generated. Add custom content in [info.md](info.md). Please follow the [script execution document](https://docs.mlcommons.org/mlcflow/targets/script/execution-flow/) to understand more about the MLC script execution. -`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/user`, you can do +`mlcflow` stores all local data under `$HOME/MLC` by default. So, if there is space constraint on the home directory and you have more space on say `/mnt/$USER`, you can do ``` -mkdir /mnt/user/MLC -ln -s /mnt/user/MLC $HOME/MLC +mkdir /mnt/$USER/MLC +ln -s /mnt/$USER/MLC $HOME/MLC ``` You can also use the `ENV` variable `MLC_REPOS` to control this location but this will need a set after every system reboot. From 812f21154a4fb7041216e15a23c5c3cda440b5fe Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Sun, 17 Aug 2025 16:46:26 +0000 Subject: [PATCH 4/4] [Automated Commit] Format Codebase [skip ci] --- automation/script/help.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/automation/script/help.py b/automation/script/help.py index f4d9ab3e5..044052ce5 100644 --- a/automation/script/help.py +++ b/automation/script/help.py @@ -42,7 +42,8 @@ def display_help(self_module, input_params): else: # Step 4: Iterate over scripts and generate help output - for script in sorted(scripts_list, key=lambda x: x.meta.get('alias', '')): + for script in sorted( + scripts_list, key=lambda x: x.meta.get('alias', '')): metadata = script.meta script_path = script.path print_script_help(
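Returning to the jemalloc changes in the first patch: the new `enable-stats` and `enable-prof` variations translate into jemalloc's `--enable-stats` and `--enable-prof` configure switches, alongside the existing `lg-page`/`lg-quantum` handling (the newly added `lg-hugepage.#` variation currently sets the same `MLC_JEMALLOC_LG_PAGE` variable as `lg-page.#`). A hedged usage sketch, assuming the comma-separated `_variation` tag syntax used elsewhere in these READMEs:

```bash
# Build jemalloc with statistics and heap profiling enabled and 16 KiB pages;
# the variations below expand to: ./configure --enable-stats --enable-prof --with-lg-page=14
mlcr get,lib,jemalloc,_enable-stats,_enable-prof,_lg-page.14
```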