diff --git a/src/sagemaker/clarify.py b/src/sagemaker/clarify.py index 0bdfa7db98..781bae30fb 100644 --- a/src/sagemaker/clarify.py +++ b/src/sagemaker/clarify.py @@ -25,9 +25,11 @@ import tempfile from abc import ABC, abstractmethod -from typing import List, Union, Dict +from typing import List, Union, Dict, Optional, Any from sagemaker import image_uris, s3, utils +from sagemaker.session import Session +from sagemaker.network import NetworkConfig from sagemaker.processing import ProcessingInput, ProcessingOutput, Processor logger = logging.getLogger(__name__) @@ -38,21 +40,21 @@ class DataConfig: def __init__( self, - s3_data_input_path, - s3_output_path, - s3_analysis_config_output_path=None, - label=None, - headers=None, - features=None, - dataset_type="text/csv", - s3_compression_type="None", - joinsource=None, - facet_dataset_uri=None, - facet_headers=None, - predicted_label_dataset_uri=None, - predicted_label_headers=None, - predicted_label=None, - excluded_columns=None, + s3_data_input_path: str, + s3_output_path: str, + s3_analysis_config_output_path: Optional[str] = None, + label: Optional[str] = None, + headers: Optional[List[str]] = None, + features: Optional[List[str]] = None, + dataset_type: str = "text/csv", + s3_compression_type: str = "None", + joinsource: Optional[Union[str, int]] = None, + facet_dataset_uri: Optional[str] = None, + facet_headers: Optional[List[str]] = None, + predicted_label_dataset_uri: Optional[str] = None, + predicted_label_headers: Optional[List[str]] = None, + predicted_label: Optional[Union[str, int]] = None, + excluded_columns: Optional[Union[List[int], List[str]]] = None, ): """Initializes a configuration of both input and output datasets. @@ -65,7 +67,7 @@ def __init__( label (str): Target attribute of the model required by bias metrics. Specified as column name or index for CSV dataset or as JSONPath for JSONLines. *Required parameter* except for when the input dataset does not contain the label. 
- features (str): JSONPath for locating the feature columns for bias metrics if the + features (List[str]): JSONPath for locating the feature columns for bias metrics if the dataset format is JSONLines. dataset_type (str): Format of the dataset. Valid values are ``"text/csv"`` for CSV, ``"application/jsonlines"`` for JSONLines, and @@ -191,10 +193,10 @@ class BiasConfig: def __init__( self, - label_values_or_threshold, - facet_name, - facet_values_or_threshold=None, - group_name=None, + label_values_or_threshold: Union[int, float, str], + facet_name: Union[str, int, List[str], List[int]], + facet_values_or_threshold: Optional[Union[int, float, str]] = None, + group_name: Optional[str] = None, ): """Initializes a configuration of the sensitive groups in the dataset. @@ -275,17 +277,17 @@ class ModelConfig: def __init__( self, - model_name: str = None, - instance_count: int = None, - instance_type: str = None, - accept_type: str = None, - content_type: str = None, - content_template: str = None, - custom_attributes: str = None, - accelerator_type: str = None, - endpoint_name_prefix: str = None, - target_model: str = None, - endpoint_name: str = None, + model_name: Optional[str] = None, + instance_count: Optional[int] = None, + instance_type: Optional[str] = None, + accept_type: Optional[str] = None, + content_type: Optional[str] = None, + content_template: Optional[str] = None, + custom_attributes: Optional[str] = None, + accelerator_type: Optional[str] = None, + endpoint_name_prefix: Optional[str] = None, + target_model: Optional[str] = None, + endpoint_name: Optional[str] = None, ): r"""Initializes a configuration of a model and the endpoint to be created for it. 
@@ -414,10 +416,10 @@ class ModelPredictedLabelConfig: def __init__( self, - label=None, - probability=None, - probability_threshold=None, - label_headers=None, + label: Optional[Union[str, int]] = None, + probability: Optional[Union[str, int]] = None, + probability_threshold: Optional[float] = None, + label_headers: Optional[List[str]] = None, ): """Initializes a model output config to extract the predicted label or predicted score(s). @@ -509,7 +511,9 @@ class PDPConfig(ExplainabilityConfig): and the corresponding values are included in the analysis output. """ # noqa E501 - def __init__(self, features=None, grid_resolution=15, top_k_features=10): + def __init__( + self, features: Optional[List] = None, grid_resolution: int = 15, top_k_features: int = 10 + ): """Initializes PDP config. Args: @@ -680,8 +684,8 @@ class TextConfig: def __init__( self, - granularity, - language, + granularity: str, + language: str, ): """Initializes a text configuration. @@ -736,13 +740,13 @@ class ImageConfig: def __init__( self, - model_type, - num_segments=None, - feature_extraction_method=None, - segment_compactness=None, - max_objects=None, - iou_threshold=None, - context=None, + model_type: str, + num_segments: Optional[int] = None, + feature_extraction_method: Optional[str] = None, + segment_compactness: Optional[float] = None, + max_objects: Optional[int] = None, + iou_threshold: Optional[float] = None, + context: Optional[float] = None, ): """Initializes a config object for Computer Vision (CV) Image explainability. 
@@ -817,15 +821,15 @@ class SHAPConfig(ExplainabilityConfig): def __init__( self, - baseline=None, - num_samples=None, - agg_method=None, - use_logit=False, - save_local_shap_values=True, - seed=None, - num_clusters=None, - text_config=None, - image_config=None, + baseline: Optional[Union[str, List]] = None, + num_samples: Optional[int] = None, + agg_method: Optional[str] = None, + use_logit: bool = False, + save_local_shap_values: bool = True, + seed: Optional[int] = None, + num_clusters: Optional[int] = None, + text_config: Optional[TextConfig] = None, + image_config: Optional[ImageConfig] = None, ): """Initializes config for SHAP analysis. @@ -909,19 +913,19 @@ class SageMakerClarifyProcessor(Processor): def __init__( self, - role, - instance_count, - instance_type, - volume_size_in_gb=30, - volume_kms_key=None, - output_kms_key=None, - max_runtime_in_seconds=None, - sagemaker_session=None, - env=None, - tags=None, - network_config=None, - job_name_prefix=None, - version=None, + role: str, + instance_count: int, + instance_type: str, + volume_size_in_gb: int = 30, + volume_kms_key: Optional[str] = None, + output_kms_key: Optional[str] = None, + max_runtime_in_seconds: Optional[int] = None, + sagemaker_session: Optional[Session] = None, + env: Optional[Dict[str, str]] = None, + tags: Optional[List[Dict[str, str]]] = None, + network_config: Optional[NetworkConfig] = None, + job_name_prefix: Optional[str] = None, + version: Optional[str] = None, ): """Initializes a SageMakerClarifyProcessor to compute bias metrics and model explanations. 
@@ -993,13 +997,13 @@ def run(self, **_): def _run( self, - data_config, - analysis_config, - wait, - logs, - job_name, - kms_key, - experiment_config, + data_config: DataConfig, + analysis_config: Dict[str, Any], + wait: bool, + logs: bool, + job_name: str, + kms_key: str, + experiment_config: Dict[str, str], ): """Runs a :class:`~sagemaker.processing.ProcessingJob` with the SageMaker Clarify container @@ -1077,14 +1081,14 @@ def _run( def run_pre_training_bias( self, - data_config, - data_bias_config, - methods="all", - wait=True, - logs=True, - job_name=None, - kms_key=None, - experiment_config=None, + data_config: DataConfig, + data_bias_config: BiasConfig, + methods: Union[str, List[str]] = "all", + wait: bool = True, + logs: bool = True, + job_name: Optional[str] = None, + kms_key: Optional[str] = None, + experiment_config: Optional[Dict[str, str]] = None, ): """Runs a :class:`~sagemaker.processing.ProcessingJob` to compute pre-training bias methods @@ -1146,16 +1150,16 @@ def run_pre_training_bias( def run_post_training_bias( self, - data_config, - data_bias_config, - model_config, - model_predicted_label_config, - methods="all", - wait=True, - logs=True, - job_name=None, - kms_key=None, - experiment_config=None, + data_config: DataConfig, + data_bias_config: BiasConfig, + model_config: ModelConfig, + model_predicted_label_config: ModelPredictedLabelConfig, + methods: Union[str, List[str]] = "all", + wait: bool = True, + logs: bool = True, + job_name: Optional[str] = None, + kms_key: Optional[str] = None, + experiment_config: Optional[Dict[str, str]] = None, ): """Runs a :class:`~sagemaker.processing.ProcessingJob` to compute posttraining bias @@ -1231,17 +1235,17 @@ def run_post_training_bias( def run_bias( self, - data_config, - bias_config, - model_config, - model_predicted_label_config=None, - pre_training_methods="all", - post_training_methods="all", - wait=True, - logs=True, - job_name=None, - kms_key=None, - experiment_config=None, + data_config: 
DataConfig, + bias_config: BiasConfig, + model_config: ModelConfig, + model_predicted_label_config: Optional[ModelPredictedLabelConfig] = None, + pre_training_methods: Union[str, List[str]] = "all", + post_training_methods: Union[str, List[str]] = "all", + wait: bool = True, + logs: bool = True, + job_name: Optional[str] = None, + kms_key: Optional[str] = None, + experiment_config: Optional[Dict[str, str]] = None, ): """Runs a :class:`~sagemaker.processing.ProcessingJob` to compute the requested bias methods @@ -1325,15 +1329,15 @@ def run_bias( def run_explainability( self, - data_config, - model_config, - explainability_config, - model_scores=None, - wait=True, - logs=True, - job_name=None, - kms_key=None, - experiment_config=None, + data_config: DataConfig, + model_config: ModelConfig, + explainability_config: Union[ExplainabilityConfig, List], + model_scores: Optional[Union[int, str, ModelPredictedLabelConfig]] = None, + wait: bool = True, + logs: bool = True, + job_name: Optional[str] = None, + kms_key: Optional[str] = None, + experiment_config: Optional[Dict[str, str]] = None, ): """Runs a :class:`~sagemaker.processing.ProcessingJob` computing feature attributions. diff --git a/src/sagemaker/fw_utils.py b/src/sagemaker/fw_utils.py index 4f87d32d5f..5ea45e76dc 100644 --- a/src/sagemaker/fw_utils.py +++ b/src/sagemaker/fw_utils.py @@ -575,7 +575,7 @@ def validate_smdistributed( if "smdistributed" not in distribution: # Distribution strategy other than smdistributed is selected return - if is_pipeline_variable(instance_type): + if is_pipeline_variable(instance_type) or is_pipeline_variable(image_uri): # The instance_type is not available in compile time. 
# Rather, it's given in Pipeline execution time return diff --git a/src/sagemaker/huggingface/processing.py b/src/sagemaker/huggingface/processing.py index 3f3813b778..63810b0eb9 100644 --- a/src/sagemaker/huggingface/processing.py +++ b/src/sagemaker/huggingface/processing.py @@ -17,9 +17,15 @@ """ from __future__ import absolute_import +from typing import Union, Optional, List, Dict + +from sagemaker.session import Session +from sagemaker.network import NetworkConfig from sagemaker.processing import FrameworkProcessor from sagemaker.huggingface.estimator import HuggingFace +from sagemaker.workflow.entities import PipelineVariable + class HuggingFaceProcessor(FrameworkProcessor): """Handles Amazon SageMaker processing tasks for jobs using HuggingFace containers.""" @@ -28,25 +34,25 @@ class HuggingFaceProcessor(FrameworkProcessor): def __init__( self, - role, - instance_count, - instance_type, - transformers_version=None, - tensorflow_version=None, - pytorch_version=None, - py_version="py36", - image_uri=None, - command=None, - volume_size_in_gb=30, - volume_kms_key=None, - output_kms_key=None, - code_location=None, - max_runtime_in_seconds=None, - base_job_name=None, - sagemaker_session=None, - env=None, - tags=None, - network_config=None, + role: str, + instance_count: Union[int, PipelineVariable], + instance_type: Union[str, PipelineVariable], + transformers_version: Optional[str] = None, + tensorflow_version: Optional[str] = None, + pytorch_version: Optional[str] = None, + py_version: str = "py36", + image_uri: Optional[Union[str, PipelineVariable]] = None, + command: Optional[List[str]] = None, + volume_size_in_gb: Union[int, PipelineVariable] = 30, + volume_kms_key: Optional[Union[str, PipelineVariable]] = None, + output_kms_key: Optional[Union[str, PipelineVariable]] = None, + code_location: Optional[str] = None, + max_runtime_in_seconds: Optional[Union[int, PipelineVariable]] = None, + base_job_name: Optional[str] = None, + sagemaker_session: 
Optional[Session] = None, + env: Optional[Dict[str, Union[str, PipelineVariable]]] = None, + tags: Optional[List[Dict[str, Union[str, PipelineVariable]]]] = None, + network_config: Optional[NetworkConfig] = None, ): """This processor executes a Python script in a HuggingFace execution environment. diff --git a/src/sagemaker/mxnet/processing.py b/src/sagemaker/mxnet/processing.py index 663b08be4b..71bce7cdff 100644 --- a/src/sagemaker/mxnet/processing.py +++ b/src/sagemaker/mxnet/processing.py @@ -17,8 +17,13 @@ """ from __future__ import absolute_import +from typing import Union, Optional, List, Dict + +from sagemaker.session import Session +from sagemaker.network import NetworkConfig from sagemaker.mxnet.estimator import MXNet from sagemaker.processing import FrameworkProcessor +from sagemaker.workflow.entities import PipelineVariable class MXNetProcessor(FrameworkProcessor): @@ -28,23 +33,23 @@ class MXNetProcessor(FrameworkProcessor): def __init__( self, - framework_version, # New arg - role, - instance_count, - instance_type, - py_version="py3", # New kwarg - image_uri=None, - command=None, - volume_size_in_gb=30, - volume_kms_key=None, - output_kms_key=None, - code_location=None, # New arg - max_runtime_in_seconds=None, - base_job_name=None, - sagemaker_session=None, - env=None, - tags=None, - network_config=None, + framework_version: str, # New arg + role: str, + instance_count: Union[int, PipelineVariable], + instance_type: Union[str, PipelineVariable], + py_version: str = "py3", # New kwarg + image_uri: Optional[Union[str, PipelineVariable]] = None, + command: Optional[List[str]] = None, + volume_size_in_gb: Union[int, PipelineVariable] = 30, + volume_kms_key: Optional[Union[str, PipelineVariable]] = None, + output_kms_key: Optional[Union[str, PipelineVariable]] = None, + code_location: Optional[str] = None, # New arg + max_runtime_in_seconds: Optional[Union[int, PipelineVariable]] = None, + base_job_name: Optional[str] = None, + sagemaker_session: 
Optional[Session] = None, + env: Optional[Dict[str, Union[str, PipelineVariable]]] = None, + tags: Optional[List[Dict[str, Union[str, PipelineVariable]]]] = None, + network_config: Optional[NetworkConfig] = None, ): """This processor executes a Python script in a managed MXNet execution environment. diff --git a/src/sagemaker/pytorch/processing.py b/src/sagemaker/pytorch/processing.py index a6581efac6..73551243e3 100644 --- a/src/sagemaker/pytorch/processing.py +++ b/src/sagemaker/pytorch/processing.py @@ -17,8 +17,13 @@ """ from __future__ import absolute_import +from typing import Union, Optional, List, Dict + +from sagemaker.session import Session +from sagemaker.network import NetworkConfig from sagemaker.processing import FrameworkProcessor from sagemaker.pytorch.estimator import PyTorch +from sagemaker.workflow.entities import PipelineVariable class PyTorchProcessor(FrameworkProcessor): @@ -28,23 +33,23 @@ class PyTorchProcessor(FrameworkProcessor): def __init__( self, - framework_version, # New arg - role, - instance_count, - instance_type, - py_version="py3", # New kwarg - image_uri=None, - command=None, - volume_size_in_gb=30, - volume_kms_key=None, - output_kms_key=None, - code_location=None, # New arg - max_runtime_in_seconds=None, - base_job_name=None, - sagemaker_session=None, - env=None, - tags=None, - network_config=None, + framework_version: str, # New arg + role: str, + instance_count: Union[int, PipelineVariable], + instance_type: Union[str, PipelineVariable], + py_version: str = "py3", # New kwarg + image_uri: Optional[Union[str, PipelineVariable]] = None, + command: Optional[List[str]] = None, + volume_size_in_gb: Union[int, PipelineVariable] = 30, + volume_kms_key: Optional[Union[str, PipelineVariable]] = None, + output_kms_key: Optional[Union[str, PipelineVariable]] = None, + code_location: Optional[str] = None, # New arg + max_runtime_in_seconds: Optional[Union[int, PipelineVariable]] = None, + base_job_name: Optional[str] = None, + 
sagemaker_session: Optional[Session] = None, + env: Optional[Dict[str, Union[str, PipelineVariable]]] = None, + tags: Optional[List[Dict[str, Union[str, PipelineVariable]]]] = None, + network_config: Optional[NetworkConfig] = None, ): """This processor executes a Python script in a PyTorch execution environment. diff --git a/src/sagemaker/sklearn/processing.py b/src/sagemaker/sklearn/processing.py index c5445e31f4..93896eaf50 100644 --- a/src/sagemaker/sklearn/processing.py +++ b/src/sagemaker/sklearn/processing.py @@ -17,9 +17,13 @@ """ from __future__ import absolute_import +from typing import Union, List, Dict, Optional + +from sagemaker.network import NetworkConfig from sagemaker import image_uris, Session from sagemaker.processing import ScriptProcessor from sagemaker.sklearn import defaults +from sagemaker.workflow.entities import PipelineVariable class SKLearnProcessor(ScriptProcessor): @@ -27,20 +31,20 @@ class SKLearnProcessor(ScriptProcessor): def __init__( self, - framework_version, - role, - instance_type, - instance_count, - command=None, - volume_size_in_gb=30, - volume_kms_key=None, - output_kms_key=None, - max_runtime_in_seconds=None, - base_job_name=None, - sagemaker_session=None, - env=None, - tags=None, - network_config=None, + framework_version: str, + role: str, + instance_type: Union[str, PipelineVariable], + instance_count: Union[int, PipelineVariable], + command: Optional[List[str]] = None, + volume_size_in_gb: Union[int, PipelineVariable] = 30, + volume_kms_key: Optional[Union[str, PipelineVariable]] = None, + output_kms_key: Optional[Union[str, PipelineVariable]] = None, + max_runtime_in_seconds: Optional[Union[int, PipelineVariable]] = None, + base_job_name: Optional[str] = None, + sagemaker_session: Optional[Session] = None, + env: Optional[Dict[str, Union[str, PipelineVariable]]] = None, + tags: Optional[List[Dict[str, Union[str, PipelineVariable]]]] = None, + network_config: Optional[NetworkConfig] = None, ): """Initialize an 
``SKLearnProcessor`` instance. @@ -53,19 +57,19 @@ def __init__( to access training data and model artifacts. After the endpoint is created, the inference code might use the IAM role, if it needs to access an AWS resource. - instance_type (str): Type of EC2 instance to use for + instance_type (str or PipelineVariable): Type of EC2 instance to use for processing, for example, 'ml.c4.xlarge'. - instance_count (int): The number of instances to run + instance_count (int or PipelineVariable): The number of instances to run the Processing job with. Defaults to 1. command ([str]): The command to run, along with any command-line flags. Example: ["python3", "-v"]. If not provided, ["python3"] or ["python2"] will be chosen based on the py_version parameter. - volume_size_in_gb (int): Size in GB of the EBS volume to + volume_size_in_gb (int or PipelineVariable): Size in GB of the EBS volume to use for storing data during processing (default: 30). - volume_kms_key (str): A KMS key for the processing + volume_kms_key (str or PipelineVariable): A KMS key for the processing volume. - output_kms_key (str): The KMS key id for all ProcessingOutputs. - max_runtime_in_seconds (int): Timeout in seconds. + output_kms_key (str or PipelineVariable): The KMS key id for all ProcessingOutputs. + max_runtime_in_seconds (int or PipelineVariable): Timeout in seconds. After this amount of time Amazon SageMaker terminates the job regardless of its current status. base_job_name (str): Prefix for processing name. If not specified, @@ -75,8 +79,10 @@ def __init__( manages interactions with Amazon SageMaker APIs and any other AWS services needed. If not specified, the processor creates one using the default AWS configuration chain. - env (dict): Environment variables to be passed to the processing job. - tags ([dict]): List of tags to be passed to the processing job. + env (dict[str, str] or dict[str, PipelineVariable]): Environment variables + to be passed to the processing job. 
+ tags (list[dict[str, str]] or list[dict[str, PipelineVariable]]): List of tags + to be passed to the processing job. network_config (sagemaker.network.NetworkConfig): A NetworkConfig object that configures network isolation, encryption of inter-container traffic, security group IDs, and subnets. diff --git a/src/sagemaker/tensorflow/processing.py b/src/sagemaker/tensorflow/processing.py index 38bf784b7c..af5ac68b9d 100644 --- a/src/sagemaker/tensorflow/processing.py +++ b/src/sagemaker/tensorflow/processing.py @@ -17,8 +17,13 @@ """ from __future__ import absolute_import +from typing import Union, List, Dict, Optional + +from sagemaker.session import Session +from sagemaker.network import NetworkConfig from sagemaker.processing import FrameworkProcessor from sagemaker.tensorflow.estimator import TensorFlow +from sagemaker.workflow.entities import PipelineVariable class TensorFlowProcessor(FrameworkProcessor): @@ -28,23 +33,23 @@ class TensorFlowProcessor(FrameworkProcessor): def __init__( self, - framework_version, # New arg - role, - instance_count, - instance_type, - py_version="py3", # New kwarg - image_uri=None, - command=None, - volume_size_in_gb=30, - volume_kms_key=None, - output_kms_key=None, - code_location=None, # New arg - max_runtime_in_seconds=None, - base_job_name=None, - sagemaker_session=None, - env=None, - tags=None, - network_config=None, + framework_version: str, # New arg + role: str, + instance_count: Union[int, PipelineVariable], + instance_type: Union[str, PipelineVariable], + py_version: str = "py3", # New kwarg + image_uri: Optional[Union[str, PipelineVariable]] = None, + command: Optional[List[str]] = None, + volume_size_in_gb: Union[int, PipelineVariable] = 30, + volume_kms_key: Optional[Union[str, PipelineVariable]] = None, + output_kms_key: Optional[Union[str, PipelineVariable]] = None, + code_location: Optional[str] = None, # New arg + max_runtime_in_seconds: Optional[Union[int, PipelineVariable]] = None, + base_job_name: 
Optional[str] = None, + sagemaker_session: Optional[Session] = None, + env: Optional[Dict[str, Union[str, PipelineVariable]]] = None, + tags: Optional[List[Dict[str, Union[str, PipelineVariable]]]] = None, + network_config: Optional[NetworkConfig] = None, ): """This processor executes a Python script in a TensorFlow execution environment. diff --git a/src/sagemaker/xgboost/processing.py b/src/sagemaker/xgboost/processing.py index 3ca4b2361f..41b557b731 100644 --- a/src/sagemaker/xgboost/processing.py +++ b/src/sagemaker/xgboost/processing.py @@ -17,8 +17,13 @@ """ from __future__ import absolute_import +from typing import Union, List, Dict, Optional + +from sagemaker.session import Session +from sagemaker.network import NetworkConfig from sagemaker.processing import FrameworkProcessor from sagemaker.xgboost.estimator import XGBoost +from sagemaker.workflow.entities import PipelineVariable class XGBoostProcessor(FrameworkProcessor): @@ -28,23 +33,23 @@ class XGBoostProcessor(FrameworkProcessor): def __init__( self, - framework_version, # New arg - role, - instance_count, - instance_type, - py_version="py3", # New kwarg - image_uri=None, - command=None, - volume_size_in_gb=30, - volume_kms_key=None, - output_kms_key=None, - code_location=None, # New arg - max_runtime_in_seconds=None, - base_job_name=None, - sagemaker_session=None, - env=None, - tags=None, - network_config=None, + framework_version: str, # New arg + role: str, + instance_count: Union[int, PipelineVariable], + instance_type: Union[str, PipelineVariable], + py_version: str = "py3", # New kwarg + image_uri: Optional[Union[str, PipelineVariable]] = None, + command: Optional[List[str]] = None, + volume_size_in_gb: Union[int, PipelineVariable] = 30, + volume_kms_key: Optional[Union[str, PipelineVariable]] = None, + output_kms_key: Optional[Union[str, PipelineVariable]] = None, + code_location: Optional[str] = None, # New arg + max_runtime_in_seconds: Optional[Union[int, PipelineVariable]] = None, + 
base_job_name: Optional[str] = None, + sagemaker_session: Optional[Session] = None, + env: Optional[Dict[str, Union[str, PipelineVariable]]] = None, + tags: Optional[List[Dict[str, Union[str, PipelineVariable]]]] = None, + network_config: Optional[NetworkConfig] = None, ): """This processor executes a Python script in an XGBoost execution environment.