From b62606399e98fdcda54a439c3c038e0c51df8167 Mon Sep 17 00:00:00 2001
From: KumoLiu
Date: Thu, 31 Aug 2023 17:57:13 +0800
Subject: [PATCH] fix #1499

Signed-off-by: KumoLiu
---
 model_zoo/app_integrate_bundle/README.md   | 35 ++++++++++++++++------
 model_zoo/app_integrate_bundle/ensemble.py | 16 ++--------
 2 files changed, 29 insertions(+), 22 deletions(-)

diff --git a/model_zoo/app_integrate_bundle/README.md b/model_zoo/app_integrate_bundle/README.md
index 0950dc241a..d3809d4a7e 100644
--- a/model_zoo/app_integrate_bundle/README.md
+++ b/model_zoo/app_integrate_bundle/README.md
@@ -83,17 +83,18 @@ python ensemble.py --bundle_root bundle_root_path --dataset_dir data_root_path

 ## **How to integrate Bundle in your own application**
 ### Get component from bundle
+Check all supported properties in https://github.com/Project-MONAI/MONAI/blob/dev/monai/bundle/properties.py.
 ```
-from monai.bundle import ConfigWorkflow
-
-train_workflow = ConfigWorkflow(
-    config_file=bundle_config_path,
-    meta_file=bundle_metadata_path,
-    logging_file=bundle_logging_path,
-    workflow="train",
-)
-train_workflow.initialize()
+from monai.bundle import create_workflow
+
+train_workflow = create_workflow(config_file=bundle_config_path, workflow_type="train")
+
+# get train postprocessing
 postprocessing = train_workflow.train_postprocessing
+
+# get meta information
+version = train_workflow.version
+description = train_workflow.description
 ```
 ### Use component in your pipeline
 ```
@@ -108,9 +109,25 @@ evaluator = SupervisedEvaluator(
 )
 ```
 ### Update component with your own args
+
+- If the component you want to replace is listed [here](https://github.com/Project-MONAI/MONAI/blob/dev/monai/bundle/properties.py), you can replace it directly as below:
 ```
 # update `max_epochs` in workflow
 train_workflow.max_epochs = max_epochs
+
+# must execute 'initialize' again after changing the content
+train_workflow.initialize()
+print(train_workflow.max_epochs)
+```
+- Otherwise, you can override the components when you create the workflow.
+```
+override = {
+    "network": "$@network_def.to(@device)",
+    "dataset#_target_": "Dataset",
+    "dataset#data": [{"image": filename}],
+    "postprocessing#transforms#2#output_postfix": "seg",
+}
+train_workflow = create_workflow(config_file=bundle_config_path, workflow_type="train", **override)
 ```

 ## Questions and bugs

diff --git a/model_zoo/app_integrate_bundle/ensemble.py b/model_zoo/app_integrate_bundle/ensemble.py
index cfb12fc9fb..87b89836ff 100644
--- a/model_zoo/app_integrate_bundle/ensemble.py
+++ b/model_zoo/app_integrate_bundle/ensemble.py
@@ -18,7 +18,7 @@ import torch

 from monai.transforms import Compose
 from monai.transforms.post.dictionary import MeanEnsembled, VoteEnsembled
-from monai.bundle import ConfigWorkflow
+from monai.bundle import create_workflow
 from monai.engines import EnsembleEvaluator
 from monai.utils import optional_import

@@ -74,12 +74,7 @@ def __init__(self, path):
         self.bundle_metadata_path = os.path.join(path, "configs", Const.METADATA_JSON)
         self.bundle_logging_path = os.path.join(path, "configs", Const.LOGGING_CONFIG)

-        self.train_workflow = ConfigWorkflow(
-            config_file=self.bundle_config_path,
-            meta_file=self.bundle_metadata_path,
-            logging_file=self.bundle_logging_path,
-            workflow="train",
-        )
+        self.train_workflow = create_workflow(config_file=self.bundle_config_path, workflow_type="train")

     def _partition_datalist(self, datalist, n_splits=5, shuffle=False):
         logger.info(f"Total Records in Dataset: {len(datalist)}")
@@ -110,12 +105,7 @@ def ensemble_inference(self, device, test_datalist, ensemble="Mean"):
         logger.info(f"Total Records in Test Dataset: {len(test_datalist)}")

         bundle_inference_config_path = os.path.join(self.bundle_path, "configs", inference_config_paths[0])
-        inference_workflow = ConfigWorkflow(
-            config_file=bundle_inference_config_path,
-            meta_file=None,
-            logging_file=None,
-            workflow="inference",
-        )
+        inference_workflow = create_workflow(config_file=bundle_inference_config_path, workflow_type="inference")
         inference_workflow.dataset_data = test_datalist
         # this application has an additional requirement for the bundle workflow to provide the property dataloader
         inference_workflow.add_property(name="dataloader", required=True, config_id="dataloader")
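For reference, a minimal end-to-end sketch of driving the `create_workflow` API that this patch switches to, tying together the pieces touched above; the bundle path and the `device` override below are illustrative assumptions rather than part of the patch, and `run()`/`finalize()` are the standard `BundleWorkflow` lifecycle methods:

```
# minimal sketch, not part of the patch; assumes a bundle config at the hypothetical path below
from monai.bundle import create_workflow

bundle_config_path = "./my_bundle/configs/train.json"  # hypothetical bundle location

# keyword arguments are applied as config overrides when the workflow is created;
# '$' expressions are evaluated by the bundle config parser (assumes the config defines `device`)
train_workflow = create_workflow(
    config_file=bundle_config_path,
    workflow_type="train",
    **{"device": "$torch.device('cpu')"},
)

# read meta information and any property listed in monai/bundle/properties.py
print(train_workflow.version, train_workflow.description)

# update a supported property, then initialize again so the change takes effect
train_workflow.max_epochs = 2
train_workflow.initialize()

train_workflow.run()       # execute the training loop defined by the bundle
train_workflow.finalize()  # release resources when done
```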