diff --git a/script/download-and-extract/meta.yaml b/script/download-and-extract/meta.yaml index 96ff8c6a8..02aaf1175 100644 --- a/script/download-and-extract/meta.yaml +++ b/script/download-and-extract/meta.yaml @@ -125,4 +125,9 @@ variations: download-script: tags: _wget group: download-tool + r2_downloader: + add_deps: + download-script: + tags: _r2_downloader + group: download-tool versions: {} diff --git a/script/download-file/customize.py b/script/download-file/customize.py index 5c70f7931..3e0665c79 100644 --- a/script/download-file/customize.py +++ b/script/download-file/customize.py @@ -203,6 +203,18 @@ def preprocess(i): env['MLC_DOWNLOAD_CMD'] += f" || (({del_cmd} {env['MLC_DOWNLOAD_FILENAME']} || true) && wget -nc {extra_download_options} {url})" logger.info(f"{env['MLC_DOWNLOAD_CMD']}") + elif tool == "r2_downloader": + env['MLC_DOWNLOAD_CMD'] = f"bash <(curl -s https://raw.githubusercontent.com/mlcommons/r2-downloader/refs/heads/main/mlc-r2-downloader.sh) " + if env["MLC_HOST_OS_TYPE"] == "windows": + # have to modify the variable from url to temp_url if it is + # going to be used anywhere after this point + url = url.replace("%", "%%") + temp_download_file = env['MLC_DOWNLOAD_FILENAME'].replace( + "%", "%%") + else: + temp_download_file = env['MLC_DOWNLOAD_FILENAME'] + env['MLC_DOWNLOAD_CMD'] += f" -d {q}{os.path.join(os.getcwd(), temp_download_file)}{q} {extra_download_options} {url}" + elif tool == "curl": if env.get('MLC_DOWNLOAD_FILENAME', '') != '': extra_download_options += f" --output {q}{env['MLC_DOWNLOAD_FILENAME']}{q} " diff --git a/script/download-file/meta.yaml b/script/download-file/meta.yaml index 92d11e430..e195b078f 100644 --- a/script/download-file/meta.yaml +++ b/script/download-file/meta.yaml @@ -77,4 +77,9 @@ variations: env: MLC_DOWNLOAD_TOOL: wget group: download-tool + r2_downloader: + env: + MLC_DOWNLOAD_TOOL: r2_downloader + group: download-tool + versions: {} diff --git a/script/get-dataset-cnndm/customize.py 
b/script/get-dataset-cnndm/customize.py index 0115d0e8c..103bb4b95 100644 --- a/script/get-dataset-cnndm/customize.py +++ b/script/get-dataset-cnndm/customize.py @@ -26,22 +26,23 @@ def preprocess(i): def postprocess(i): env = i['env'] - if env.get('MLC_TMP_ML_MODEL', '') != "llama3_1-8b": - if is_false(env.get('MLC_DATASET_CALIBRATION', '')): - env['MLC_DATASET_PATH'] = os.path.join(os.getcwd(), 'install') - env['MLC_DATASET_EVAL_PATH'] = os.path.join( - os.getcwd(), 'install', 'cnn_eval.json') - env['MLC_DATASET_CNNDM_EVAL_PATH'] = os.path.join( - os.getcwd(), 'install', 'cnn_eval.json') - env['MLC_GET_DEPENDENT_CACHED_PATH'] = env['MLC_DATASET_PATH'] + if env.get('MLC_DOWNLOAD_MODE', '') != "dry": + if env.get('MLC_TMP_ML_MODEL', '') != "llama3_1-8b": + if is_false(env.get('MLC_DATASET_CALIBRATION', '')): + env['MLC_DATASET_PATH'] = os.path.join(os.getcwd(), 'install') + env['MLC_DATASET_EVAL_PATH'] = os.path.join( + os.getcwd(), 'install', 'cnn_eval.json') + env['MLC_DATASET_CNNDM_EVAL_PATH'] = os.path.join( + os.getcwd(), 'install', 'cnn_eval.json') + env['MLC_GET_DEPENDENT_CACHED_PATH'] = env['MLC_DATASET_PATH'] + else: + env['MLC_CALIBRATION_DATASET_PATH'] = os.path.join( + os.getcwd(), 'install', 'cnn_dailymail_calibration.json') + env['MLC_CALIBRATION_DATASET_CNNDM_PATH'] = os.path.join( + os.getcwd(), 'install', 'cnn_dailymail_calibration.json') + env['MLC_GET_DEPENDENT_CACHED_PATH'] = env['MLC_CALIBRATION_DATASET_PATH'] else: - env['MLC_CALIBRATION_DATASET_PATH'] = os.path.join( - os.getcwd(), 'install', 'cnn_dailymail_calibration.json') - env['MLC_CALIBRATION_DATASET_CNNDM_PATH'] = os.path.join( - os.getcwd(), 'install', 'cnn_dailymail_calibration.json') - env['MLC_GET_DEPENDENT_CACHED_PATH'] = env['MLC_CALIBRATION_DATASET_PATH'] - else: - env['MLC_DATASET_CNNDM_EVAL_PATH'] = os.path.join( - env['MLC_DATASET_CNNDM_EVAL_PATH'], env['MLC_DATASET_CNNDM_FILENAME']) + env['MLC_DATASET_CNNDM_EVAL_PATH'] = os.path.join( + env['MLC_DATASET_CNNDM_EVAL_PATH'], 
env['MLC_DATASET_CNNDM_FILENAME']) return {'return': 0} diff --git a/script/get-dataset-cnndm/meta.yaml b/script/get-dataset-cnndm/meta.yaml index f1cd45335..cf70343e3 100644 --- a/script/get-dataset-cnndm/meta.yaml +++ b/script/get-dataset-cnndm/meta.yaml @@ -58,10 +58,25 @@ variations: datacenter: group: category rclone: + prehook_deps: + - tags: get,rclone + enable_if_env: + MLC_TMP_REQUIRE_DOWNLOAD: + - yes + - tags: get,rclone-config,_mlc-inference + force_cache: true + enable_if_env: + MLC_TMP_REQUIRE_DOWNLOAD: + - yes group: download-tool add_deps_recursive: dae: tags: _rclone + r2_downloader: + group: download-tool + add_deps_recursive: + dae: + tags: _r2_downloader dry-run: group: run-mode env: @@ -69,18 +84,12 @@ variations: dry-run,rclone: env: MLC_DOWNLOAD_EXTRA_OPTIONS: --dry-run + dry-run,r2_downloader: + env: + MLC_DOWNLOAD_EXTRA_OPTIONS: -x mlc: group: download-src prehook_deps: - - tags: get,rclone - enable_if_env: - MLC_TMP_REQUIRE_DOWNLOAD: - - yes - - tags: get,rclone-config,_mlc-inference - force_cache: true - enable_if_env: - MLC_TMP_REQUIRE_DOWNLOAD: - - yes - enable_if_env: MLC_TMP_REQUIRE_DOWNLOAD: - 'yes' @@ -126,6 +135,15 @@ variations: MLC_DOWNLOAD_URL: mlc-inference:mlcommons-inference-wg-public/llama3.1_8b/<<>> MLC_DOWNLOAD_FINAL_ENV_NAME: MLC_DATASET_CNNDM_EVAL_PATH MLC_EXTRACT_FINAL_ENV_NAME: MLC_DATASET_CNNDM_EVAL_PATH + validation,edge,llama3,mlc,r2_downloader: + adr: + dae: + extra_cache_tags: cnndm,dataset,llama3,val,edge + env: + MLC_DATASET_CNNDM_FILENAME: sample_cnn_eval_5000.json + MLC_DOWNLOAD_URL: https://inference.mlcommons-storage.org/metadata/llama3-1-8b-sample-cnn-eval-5000.uri + MLC_DOWNLOAD_FINAL_ENV_NAME: MLC_DATASET_CNNDM_EVAL_PATH + MLC_EXTRACT_FINAL_ENV_NAME: MLC_DATASET_CNNDM_EVAL_PATH validation,datacenter,llama3,mlc,rclone: adr: dae: @@ -135,6 +153,15 @@ variations: MLC_DOWNLOAD_URL: mlc-inference:mlcommons-inference-wg-public/llama3.1_8b/<<>> MLC_DOWNLOAD_FINAL_ENV_NAME: MLC_DATASET_CNNDM_EVAL_PATH 
MLC_EXTRACT_FINAL_ENV_NAME: MLC_DATASET_CNNDM_EVAL_PATH + validation,datacenter,llama3,mlc,r2_downloader: + adr: + dae: + extra_cache_tags: cnndm,dataset,llama3,val,datacenter + env: + MLC_DATASET_CNNDM_FILENAME: cnn_eval.json + MLC_DOWNLOAD_URL: https://inference.mlcommons-storage.org/metadata/llama3-1-8b-cnn-eval.uri + MLC_DOWNLOAD_FINAL_ENV_NAME: MLC_DATASET_CNNDM_EVAL_PATH + MLC_EXTRACT_FINAL_ENV_NAME: MLC_DATASET_CNNDM_EVAL_PATH calibation,llama3,mlc,rclone: adr: dae: @@ -144,4 +171,21 @@ variations: MLC_DOWNLOAD_URL: mlc-inference:mlcommons-inference-wg-public/llama3.1_8b/<<>> MLC_DOWNLOAD_FINAL_ENV_NAME: MLC_CALIBRATION_DATASET_CNNDM_PATH MLC_EXTRACT_FINAL_ENV_NAME: MLC_CALIBRATION_DATASET_CNNDM_PATH - \ No newline at end of file + calibration,llama3,mlc,r2_downloader: + adr: + dae: + extra_cache_tags: cnndm,dataset,llama3,calib + env: + MLC_DATASET_CNNDM_FILENAME: cnn_dailymail_calibration.json + MLC_DOWNLOAD_URL: https://inference.mlcommons-storage.org/metadata/llama3-1-8b-cnn-dailymail-calibration.uri + MLC_DOWNLOAD_FINAL_ENV_NAME: MLC_CALIBRATION_DATASET_CNNDM_PATH + MLC_EXTRACT_FINAL_ENV_NAME: MLC_CALIBRATION_DATASET_CNNDM_PATH +tests: + run_inputs: + - variations_list: + # - validation,edge,rclone,llama3,mlc,dry-run + # - validation,datacenter,rclone,llama3,mlc,dry-run + - validation,edge,r2_downloader,llama3,mlc,dry-run + - validation,datacenter,r2_downloader,llama3,mlc,dry-run + - calibration,r2_downloader,llama3,mlc,dry-run + # - calibration,rclone,llama3,mlc,dry-run \ No newline at end of file diff --git a/script/get-dataset-whisper/customize.py b/script/get-dataset-whisper/customize.py index b2ae96020..373068eee 100644 --- a/script/get-dataset-whisper/customize.py +++ b/script/get-dataset-whisper/customize.py @@ -42,30 +42,31 @@ def postprocess(i): env = i['env'] - if env.get('MLC_TMP_DATASET_TYPE', '') != "preprocessed": - cwd = env.get('MLC_OUTDIRNAME', os.getcwd()) - data_dir = os.path.join(cwd, 'data') - env['MLC_DATASET_WHISPER_PATH'] = 
data_dir - else: - # copy files to data folder - tmp_src_dir = env["MLC_DATASET_WHISPER_PATH"] - tmp_dest_dir = os.path.join(tmp_src_dir, "data") - - os.makedirs(tmp_dest_dir, exist_ok=True) - - items_to_copy = [ - "LibriSpeech", - "dev-all", - "dev-all-repack", - "dev-all-repack.json" - ] - - for item in items_to_copy: - src_path = os.path.join(tmp_src_dir, item) - dst_path = os.path.join(tmp_dest_dir, item) - if os.path.isdir(src_path): - shutil.copytree(src_path, dst_path, dirs_exist_ok=True) - elif os.path.isfile(src_path): - shutil.copy2(src_path, dst_path) + if env.get('MLC_DOWNLOAD_MODE', '') != "dry": + if env.get('MLC_TMP_DATASET_TYPE', '') != "preprocessed": + cwd = env.get('MLC_OUTDIRNAME', os.getcwd()) + data_dir = os.path.join(cwd, 'data') + env['MLC_DATASET_WHISPER_PATH'] = data_dir + else: + # copy files to data folder + tmp_src_dir = env["MLC_DATASET_WHISPER_PATH"] + tmp_dest_dir = os.path.join(tmp_src_dir, "data") + + os.makedirs(tmp_dest_dir, exist_ok=True) + + items_to_copy = [ + "LibriSpeech", + "dev-all", + "dev-all-repack", + "dev-all-repack.json" + ] + + for item in items_to_copy: + src_path = os.path.join(tmp_src_dir, item) + dst_path = os.path.join(tmp_dest_dir, item) + if os.path.isdir(src_path): + shutil.copytree(src_path, dst_path, dirs_exist_ok=True) + elif os.path.isfile(src_path): + shutil.copy2(src_path, dst_path) return {'return': 0} diff --git a/script/get-dataset-whisper/meta.yaml b/script/get-dataset-whisper/meta.yaml index 6b433c658..12fddf9e4 100644 --- a/script/get-dataset-whisper/meta.yaml +++ b/script/get-dataset-whisper/meta.yaml @@ -10,12 +10,15 @@ tags: - dataset - whisper uid: 2cc955c795d44978 +tests: + run_inputs: + - variations_list: + - rclone,preprocessed,mlc,dry-run + - r2_downloader,preprocessed,mlc,dry-run variations: preprocessed: group: dataset-type default: true - base: - - mlc env: MLC_TMP_DATASET_TYPE: preprocessed unprocessed: @@ -42,13 +45,28 @@ variations: dry-run,rclone: env: MLC_DOWNLOAD_EXTRA_OPTIONS: 
--dry-run + dry-run,r2_downloader: + env: + MLC_DOWNLOAD_EXTRA_OPTIONS: -x mlc: default: true - base: - - rclone env: MLC_DOWNLOAD_SRC: mlcommons group: download-src + rclone: + add_deps_recursive: + dae: + tags: _rclone + default: true + group: download-tool + r2_downloader: + add_deps_recursive: + dae: + tags: _r2_downloader + group: download-tool + rclone,preprocessed: + env: + MLC_DOWNLOAD_URL: mlc-inference:mlcommons-inference-wg-public/Whisper/dataset/ prehook_deps: - enable_if_env: MLC_TMP_REQUIRE_DOWNLOAD: @@ -59,12 +77,13 @@ variations: - true force_cache: true tags: get,rclone-config,_mlc-inference + mlc,preprocessed: + prehook_deps: - enable_if_env: MLC_TMP_REQUIRE_DOWNLOAD: - 'yes' env: MLC_DOWNLOAD_FINAL_ENV_NAME: MLC_DATASET_WHISPER_PATH - MLC_DOWNLOAD_URL: mlc-inference:mlcommons-inference-wg-public/Whisper/dataset/ MLC_EXTRACT_FINAL_ENV_NAME: MLC_DATASET_WHISPER_PATH extra_cache_tags: whisper,dataset force_cache: true @@ -76,9 +95,6 @@ variations: update_tags_from_env_with_prefix: _url.: - MLC_DOWNLOAD_URL - rclone: - add_deps_recursive: - dae: - tags: _rclone - default: true - group: download-tool + r2_downloader,preprocessed: + env: + MLC_DOWNLOAD_URL: https://inference.mlcommons-storage.org/metadata/whisper-dataset.uri diff --git a/script/get-ml-model-llama3/meta.yaml b/script/get-ml-model-llama3/meta.yaml index 4752cf5d3..fd79fdc33 100644 --- a/script/get-ml-model-llama3/meta.yaml +++ b/script/get-ml-model-llama3/meta.yaml @@ -33,6 +33,13 @@ tags: - llama3 - llama3-405b uid: 2f8cef2acc334e80 +tests: + needs_pat: true + run_inputs: + - variations_list: + - rclone,405b,mlc,dry-run + - r2_downloader,405b,mlc,dry-run + - r2_downloader,8b,mlc,dry-run variations: fp16: default: true @@ -46,31 +53,22 @@ variations: default: true env: MLC_ML_MODEL_NAME: Llama-3.1-405B-Instruct + MLC_ML_MODEL_R2_HOSTED_NAME: llama3-1-405b-instruct 8b: group: model-size env: MLC_ML_MODEL_NAME: Llama-3.1-8b-Instruct + MLC_ML_MODEL_R2_HOSTED_NAME: llama3-1-8b-instruct mlc: 
group: download-src default: true prehook_deps: - - tags: get,rclone - enable_if_env: - MLC_TMP_REQUIRE_DOWNLOAD: - - yes - - tags: get,rclone-config,_mlperf-llama3-1 - force_cache: true - enable_if_env: - MLC_TMP_REQUIRE_DOWNLOAD: - - yes - enable_if_env: MLC_TMP_REQUIRE_DOWNLOAD: - 'yes' env: MLC_DOWNLOAD_FINAL_ENV_NAME: LLAMA3_CHECKPOINT_PATH MLC_EXTRACT_FINAL_ENV_NAME: LLAMA3_CHECKPOINT_PATH - MLC_DOWNLOAD_URL: mlc-llama3-1:inference/<<>> - extra_cache_tags: llama3,dataset force_cache: true names: - dae @@ -82,12 +80,39 @@ variations: - MLC_DOWNLOAD_URL env: MLC_DOWNLOAD_SRC: mlcommons + mlc,rclone: + env: + MLC_DOWNLOAD_URL: mlc-llama3-1:inference/<<>> + adr: + dae: + extra_cache_tags: llama3,dataset,rclone + mlc,r2_downloader: + env: + MLC_DOWNLOAD_URL: https://llama3-1.mlcommons-storage.org/metadata/<<>>.uri + adr: + dae: + extra_cache_tags: llama3,dataset,r2_downloader rclone: group: download-tool add_deps_recursive: dae: tags: _rclone + prehook_deps: + - tags: get,rclone + enable_if_env: + MLC_TMP_REQUIRE_DOWNLOAD: + - yes + - tags: get,rclone-config,_mlperf-llama3-1 + force_cache: true + enable_if_env: + MLC_TMP_REQUIRE_DOWNLOAD: + - yes default: true + r2_downloader: + group: download-tool + add_deps_recursive: + dae: + tags: _r2_downloader dry-run: group: run-mode env: @@ -95,6 +120,9 @@ variations: dry-run,rclone: env: MLC_DOWNLOAD_EXTRA_OPTIONS: --dry-run + dry-run,r2_downloader: + env: + MLC_DOWNLOAD_EXTRA_OPTIONS: -x hf: group: download-src default_variations: @@ -110,6 +138,7 @@ variations: tags: _model-stub.meta-llama/Llama-3.1-405B-Instruct env: MLC_ML_MODEL_NAME: Llama-3.1-405B-Instruct + MLC_ML_MODEL_R2_HOSTED_NAME: llama3-1-405b-instruct MLC_MODEL_ZOO_ENV_KEY: LLAMA3 group: huggingface-stub @@ -121,6 +150,7 @@ variations: tags: _model-stub.meta-llama/Llama-3.1-8B-Instruct env: MLC_ML_MODEL_NAME: Llama-3.1-8b-Instruct + MLC_ML_MODEL_R2_HOSTED_NAME: llama3-1-8b-instruct MLC_MODEL_ZOO_ENV_KEY: LLAMA3 group: huggingface-stub diff --git 
a/script/get-ml-model-whisper/meta.yaml b/script/get-ml-model-whisper/meta.yaml index 5442b7e8d..bd7f4eaef 100644 --- a/script/get-ml-model-whisper/meta.yaml +++ b/script/get-ml-model-whisper/meta.yaml @@ -16,6 +16,7 @@ tests: run_inputs: - variations_list: - rclone,mlc,dry-run + - r2_downloader,mlc,dry-run uid: 3bea2356e97f47b1 variations: dry-run: @@ -25,30 +26,21 @@ variations: dry-run,rclone: env: MLC_DOWNLOAD_EXTRA_OPTIONS: --dry-run + dry-run,r2_downloader: + env: + MLC_DOWNLOAD_EXTRA_OPTIONS: -x mlc: default: true env: MLC_DOWNLOAD_SRC: mlcommons group: download-src prehook_deps: - - enable_if_env: - MLC_TMP_REQUIRE_DOWNLOAD: - - true - tags: get,rclone - - enable_if_env: - MLC_TMP_REQUIRE_DOWNLOAD: - - true - env: - MLC_RCLONE_DRIVE_FOLDER_ID: 17CpM5eU8tjrxh_LpH_BTNTeT37PhzcnC - force_cache: true - tags: get,rclone-config,_mlc-inference - enable_if_env: MLC_TMP_REQUIRE_DOWNLOAD: - 'yes' env: MLC_DOWNLOAD_FINAL_ENV_NAME: MLC_ML_MODEL_WHISPER_PATH MLC_EXTRACT_FINAL_ENV_NAME: MLC_ML_MODEL_WHISPER_PATH - MLC_DOWNLOAD_URL: 'mlc-inference:mlcommons-inference-wg-public/Whisper/model/' extra_cache_tags: ml,model,whisper force_cache: true force_env_keys: @@ -63,5 +55,26 @@ variations: add_deps_recursive: dae: tags: _rclone + env: + MLC_DOWNLOAD_URL: 'mlc-inference:mlcommons-inference-wg-public/Whisper/model/' + prehook_deps: + - enable_if_env: + MLC_TMP_REQUIRE_DOWNLOAD: + - true + tags: get,rclone + - enable_if_env: + MLC_TMP_REQUIRE_DOWNLOAD: + - true + env: + MLC_RCLONE_DRIVE_FOLDER_ID: 17CpM5eU8tjrxh_LpH_BTNTeT37PhzcnC + force_cache: true + tags: get,rclone-config,_mlc-inference default: true group: download-tool + r2_downloader: + add_deps_recursive: + dae: + tags: _r2_downloader + env: + MLC_DOWNLOAD_URL: 'https://inference.mlcommons-storage.org/metadata/whisper-model.uri' + group: download-tool \ No newline at end of file diff --git a/script/get-preprocessed-dataset-mlperf-deepseek-r1/meta.yaml b/script/get-preprocessed-dataset-mlperf-deepseek-r1/meta.yaml 
index 4cb2ff28d..8c0da0989 100644 --- a/script/get-preprocessed-dataset-mlperf-deepseek-r1/meta.yaml +++ b/script/get-preprocessed-dataset-mlperf-deepseek-r1/meta.yaml @@ -25,15 +25,26 @@ variations: group: dataset-type env: MLC_PREPROCESSED_DATASET_TYPE: validation + validation,rclone: + env: MLC_DOWNLOAD_URL: mlc-inference:mlcommons-inference-wg-public/deepseek_r1/mlperf_deepseek_r1_dataset_4388_fp8_eval.pkl + validation,r2_downloader: + env: + MLC_DOWNLOAD_URL: https://inference.mlcommons-storage.org/metadata/deepseek-r1-dataset-4388-fp8-eval.uri calibration: group: dataset-type env: MLC_PREPROCESSED_DATASET_TYPE: calibration - MLC_DOWNLOAD_URL: mlc-inference:mlcommons-inference-wg-public/deepseek_r1/mlperf_deepseek_r1_calibration_dataset_500_fp8_eval.pkl - mlc: - group: download-src - default: true + r2_downloader: + group: download-tool + add_deps_recursive: + dae: + tags: _r2_downloader + rclone: + group: download-tool + add_deps_recursive: + dae: + tags: _rclone prehook_deps: - tags: get,rclone enable_if_env: @@ -44,6 +55,27 @@ MLC_TMP_REQUIRE_DOWNLOAD: - yes - tags: get,rclone-config,_mlc-inference force_cache: true enable_if_env: MLC_TMP_REQUIRE_DOWNLOAD: - yes + default: true + dry-run: + group: run-mode + env: + MLC_DOWNLOAD_MODE: dry + dry-run,rclone: + env: + MLC_DOWNLOAD_EXTRA_OPTIONS: --dry-run + dry-run,r2_downloader: + env: + MLC_DOWNLOAD_EXTRA_OPTIONS: -x + calibration,rclone: + env: + MLC_DOWNLOAD_URL: mlc-inference:mlcommons-inference-wg-public/deepseek_r1/mlperf_deepseek_r1_calibration_dataset_500_fp8_eval.pkl + calibration,r2_downloader: + env: + MLC_DOWNLOAD_URL: https://inference.mlcommons-storage.org/metadata/deepseek-r1-calibration-dataset-500-fp8-eval.uri + mlc: + group: download-src + default: true + prehook_deps: - enable_if_env: MLC_TMP_REQUIRE_DOWNLOAD: - 'yes' @@ -62,21 +94,11 @@ variations: - MLC_DOWNLOAD_URL env: MLC_DOWNLOAD_SRC: mlcommons - rclone: - group: download-tool - add_deps_recursive: - dae: - tags: _rclone - default: true - dry-run: - group: run-mode - env: - MLC_DOWNLOAD_MODE: dry - dry-run,rclone: - env: - MLC_DOWNLOAD_EXTRA_OPTIONS: --dry-run tests: 
run_inputs: - variations_list: - - validation,rclone,mlc,dry-run - - calibration,rclone,mlc,dry-run + - calibration,r2_downloader,mlc,dry-run + - validation,r2_downloader,mlc,dry-run + # - validation,rclone,mlc,dry-run + # - calibration,rclone,mlc,dry-run +