From a3c55997bb677978c4c6c245eaf5c0d5efc37137 Mon Sep 17 00:00:00 2001 From: SDKAuto Date: Tue, 17 Aug 2021 04:53:23 +0000 Subject: [PATCH] CodeGen from PR 15670 in Azure/azure-rest-api-specs Merge 463a303959d8bde96488bfbabb240a711f62c7fd into 60d6c393c7e71b45ebe0976a35fd7a5841993159 --- .../MANIFEST.in | 1 + .../_meta.json | 8 + .../_azure_machine_learning_workspaces.py | 117 +- .../machinelearningservices/_configuration.py | 2 +- .../models/__init__.py | 743 +- ...azure_machine_learning_workspaces_enums.py | 484 +- .../machinelearningservices/models/_models.py | 8723 ++++++++++++++-- .../models/_models_py3.py | 8805 +++++++++++++++-- .../models/_paged_models.py | 273 + .../operations/__init__.py | 46 +- .../_batch_deployments_operations.py | 410 + .../operations/_batch_endpoints_operations.py | 451 + .../operations/_code_containers_operations.py | 312 + .../operations/_code_versions_operations.py | 330 + .../operations/_compute_operations.py | 946 ++ .../operations/_data_containers_operations.py | 312 + .../operations/_data_versions_operations.py | 335 + .../operations/_datastores_operations.py | 405 + .../_environment_containers_operations.py | 312 + ...nment_specification_versions_operations.py | 335 + .../operations/_jobs_operations.py | 417 + ...ations.py => _labeling_jobs_operations.py} | 541 +- .../_model_containers_operations.py | 316 + .../operations/_model_versions_operations.py | 354 + .../_online_deployments_operations.py | 624 ++ .../_online_endpoints_operations.py | 790 ++ .../operations/_operations.py | 10 +- ...private_endpoint_connections_operations.py | 311 + .../_private_link_resources_operations.py | 105 + .../operations/_quotas_operations.py | 176 + .../operations/_usages_operations.py | 8 +- .../_virtual_machine_sizes_operations.py | 8 +- .../_workspace_connections_operations.py | 311 + .../_workspace_features_operations.py | 111 + .../operations/_workspace_skus_operations.py | 104 + .../operations/_workspaces_operations.py | 594 +- 36 files 
changed, 26251 insertions(+), 1879 deletions(-) create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/_meta.json create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_batch_deployments_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_batch_endpoints_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_containers_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_versions_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_compute_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_containers_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_versions_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_datastores_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_containers_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_specification_versions_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_jobs_operations.py rename sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/{_machine_learning_compute_operations.py => _labeling_jobs_operations.py} (60%) create mode 
100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_containers_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_versions_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_deployments_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_endpoints_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_endpoint_connections_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_link_resources_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_quotas_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_connections_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_features_operations.py create mode 100644 sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_skus_operations.py diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/MANIFEST.in b/sdk/machinelearning/azure-mgmt-machinelearningservices/MANIFEST.in index a3cb07df8765..3a9b6517412b 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/MANIFEST.in +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/MANIFEST.in @@ -1,3 +1,4 @@ +include _meta.json recursive-include tests *.py *.yaml include *.md include azure/__init__.py diff --git 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/_meta.json b/sdk/machinelearning/azure-mgmt-machinelearningservices/_meta.json new file mode 100644 index 000000000000..cad4ba563763 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/_meta.json @@ -0,0 +1,8 @@ +{ + "autorest": "V2", + "use": "@microsoft.azure/autorest.python@~4.0.71", + "commit": "d82e88e57759cbb93a07594d0ada4eaa21453294", + "repository_url": "https://github.com/Azure/azure-rest-api-specs", + "autorest_command": "autorest specification/machinelearningservices/resource-manager/readme.md --keep-version-file --multiapi --no-async --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --use=@microsoft.azure/autorest.python@~4.0.71 --version=V2", + "readme": "specification/machinelearningservices/resource-manager/readme.md" +} \ No newline at end of file diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_azure_machine_learning_workspaces.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_azure_machine_learning_workspaces.py index e0a0e2c4d834..3bf727e1021e 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_azure_machine_learning_workspaces.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_azure_machine_learning_workspaces.py @@ -17,7 +17,28 @@ from .operations import WorkspacesOperations from .operations import UsagesOperations from .operations import VirtualMachineSizesOperations -from .operations import MachineLearningComputeOperations +from .operations import QuotasOperations +from .operations import ComputeOperations +from .operations import PrivateEndpointConnectionsOperations +from .operations import PrivateLinkResourcesOperations +from .operations import WorkspaceConnectionsOperations +from .operations import 
BatchEndpointsOperations +from .operations import BatchDeploymentsOperations +from .operations import CodeContainersOperations +from .operations import CodeVersionsOperations +from .operations import DataContainersOperations +from .operations import DataVersionsOperations +from .operations import DatastoresOperations +from .operations import EnvironmentContainersOperations +from .operations import EnvironmentSpecificationVersionsOperations +from .operations import JobsOperations +from .operations import LabelingJobsOperations +from .operations import ModelContainersOperations +from .operations import ModelVersionsOperations +from .operations import OnlineEndpointsOperations +from .operations import OnlineDeploymentsOperations +from .operations import WorkspaceFeaturesOperations +from .operations import WorkspaceSkusOperations from . import models @@ -35,13 +56,55 @@ class AzureMachineLearningWorkspaces(SDKClient): :vartype usages: azure.mgmt.machinelearningservices.operations.UsagesOperations :ivar virtual_machine_sizes: VirtualMachineSizes operations :vartype virtual_machine_sizes: azure.mgmt.machinelearningservices.operations.VirtualMachineSizesOperations - :ivar machine_learning_compute: MachineLearningCompute operations - :vartype machine_learning_compute: azure.mgmt.machinelearningservices.operations.MachineLearningComputeOperations + :ivar quotas: Quotas operations + :vartype quotas: azure.mgmt.machinelearningservices.operations.QuotasOperations + :ivar compute: Compute operations + :vartype compute: azure.mgmt.machinelearningservices.operations.ComputeOperations + :ivar private_endpoint_connections: PrivateEndpointConnections operations + :vartype private_endpoint_connections: azure.mgmt.machinelearningservices.operations.PrivateEndpointConnectionsOperations + :ivar private_link_resources: PrivateLinkResources operations + :vartype private_link_resources: azure.mgmt.machinelearningservices.operations.PrivateLinkResourcesOperations + :ivar 
workspace_connections: WorkspaceConnections operations + :vartype workspace_connections: azure.mgmt.machinelearningservices.operations.WorkspaceConnectionsOperations + :ivar batch_endpoints: BatchEndpoints operations + :vartype batch_endpoints: azure.mgmt.machinelearningservices.operations.BatchEndpointsOperations + :ivar batch_deployments: BatchDeployments operations + :vartype batch_deployments: azure.mgmt.machinelearningservices.operations.BatchDeploymentsOperations + :ivar code_containers: CodeContainers operations + :vartype code_containers: azure.mgmt.machinelearningservices.operations.CodeContainersOperations + :ivar code_versions: CodeVersions operations + :vartype code_versions: azure.mgmt.machinelearningservices.operations.CodeVersionsOperations + :ivar data_containers: DataContainers operations + :vartype data_containers: azure.mgmt.machinelearningservices.operations.DataContainersOperations + :ivar data_versions: DataVersions operations + :vartype data_versions: azure.mgmt.machinelearningservices.operations.DataVersionsOperations + :ivar datastores: Datastores operations + :vartype datastores: azure.mgmt.machinelearningservices.operations.DatastoresOperations + :ivar environment_containers: EnvironmentContainers operations + :vartype environment_containers: azure.mgmt.machinelearningservices.operations.EnvironmentContainersOperations + :ivar environment_specification_versions: EnvironmentSpecificationVersions operations + :vartype environment_specification_versions: azure.mgmt.machinelearningservices.operations.EnvironmentSpecificationVersionsOperations + :ivar jobs: Jobs operations + :vartype jobs: azure.mgmt.machinelearningservices.operations.JobsOperations + :ivar labeling_jobs: LabelingJobs operations + :vartype labeling_jobs: azure.mgmt.machinelearningservices.operations.LabelingJobsOperations + :ivar model_containers: ModelContainers operations + :vartype model_containers: azure.mgmt.machinelearningservices.operations.ModelContainersOperations + 
:ivar model_versions: ModelVersions operations + :vartype model_versions: azure.mgmt.machinelearningservices.operations.ModelVersionsOperations + :ivar online_endpoints: OnlineEndpoints operations + :vartype online_endpoints: azure.mgmt.machinelearningservices.operations.OnlineEndpointsOperations + :ivar online_deployments: OnlineDeployments operations + :vartype online_deployments: azure.mgmt.machinelearningservices.operations.OnlineDeploymentsOperations + :ivar workspace_features: WorkspaceFeatures operations + :vartype workspace_features: azure.mgmt.machinelearningservices.operations.WorkspaceFeaturesOperations + :ivar workspace_skus: WorkspaceSkus operations + :vartype workspace_skus: azure.mgmt.machinelearningservices.operations.WorkspaceSkusOperations :param credentials: Credentials needed for the client to connect to Azure. :type credentials: :mod:`A msrestazure Credentials object` - :param subscription_id: Azure subscription identifier. + :param subscription_id: The ID of the target subscription. 
:type subscription_id: str :param str base_url: Service URL """ @@ -53,7 +116,7 @@ def __init__( super(AzureMachineLearningWorkspaces, self).__init__(self.config.credentials, self.config) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} - self.api_version = '2019-05-01' + self.api_version = '2021-03-01-preview' self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) @@ -65,5 +128,47 @@ def __init__( self._client, self.config, self._serialize, self._deserialize) self.virtual_machine_sizes = VirtualMachineSizesOperations( self._client, self.config, self._serialize, self._deserialize) - self.machine_learning_compute = MachineLearningComputeOperations( + self.quotas = QuotasOperations( + self._client, self.config, self._serialize, self._deserialize) + self.compute = ComputeOperations( + self._client, self.config, self._serialize, self._deserialize) + self.private_endpoint_connections = PrivateEndpointConnectionsOperations( + self._client, self.config, self._serialize, self._deserialize) + self.private_link_resources = PrivateLinkResourcesOperations( + self._client, self.config, self._serialize, self._deserialize) + self.workspace_connections = WorkspaceConnectionsOperations( + self._client, self.config, self._serialize, self._deserialize) + self.batch_endpoints = BatchEndpointsOperations( + self._client, self.config, self._serialize, self._deserialize) + self.batch_deployments = BatchDeploymentsOperations( + self._client, self.config, self._serialize, self._deserialize) + self.code_containers = CodeContainersOperations( + self._client, self.config, self._serialize, self._deserialize) + self.code_versions = CodeVersionsOperations( + self._client, self.config, self._serialize, self._deserialize) + self.data_containers = DataContainersOperations( + self._client, self.config, self._serialize, self._deserialize) + self.data_versions = DataVersionsOperations( + self._client, self.config, self._serialize, 
self._deserialize) + self.datastores = DatastoresOperations( + self._client, self.config, self._serialize, self._deserialize) + self.environment_containers = EnvironmentContainersOperations( + self._client, self.config, self._serialize, self._deserialize) + self.environment_specification_versions = EnvironmentSpecificationVersionsOperations( + self._client, self.config, self._serialize, self._deserialize) + self.jobs = JobsOperations( + self._client, self.config, self._serialize, self._deserialize) + self.labeling_jobs = LabelingJobsOperations( + self._client, self.config, self._serialize, self._deserialize) + self.model_containers = ModelContainersOperations( + self._client, self.config, self._serialize, self._deserialize) + self.model_versions = ModelVersionsOperations( + self._client, self.config, self._serialize, self._deserialize) + self.online_endpoints = OnlineEndpointsOperations( + self._client, self.config, self._serialize, self._deserialize) + self.online_deployments = OnlineDeploymentsOperations( + self._client, self.config, self._serialize, self._deserialize) + self.workspace_features = WorkspaceFeaturesOperations( + self._client, self.config, self._serialize, self._deserialize) + self.workspace_skus = WorkspaceSkusOperations( self._client, self.config, self._serialize, self._deserialize) diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_configuration.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_configuration.py index 06a054e13cdb..06a3bc95b65d 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_configuration.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/_configuration.py @@ -21,7 +21,7 @@ class AzureMachineLearningWorkspacesConfiguration(AzureConfiguration): :param credentials: Credentials needed for the client to connect to Azure. 
:type credentials: :mod:`A msrestazure Credentials object` - :param subscription_id: Azure subscription identifier. + :param subscription_id: The ID of the target subscription. :type subscription_id: str :param str base_url: Service URL """ diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/__init__.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/__init__.py index 2a709812fa94..640b29769cdd 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/__init__.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/__init__.py @@ -10,175 +10,896 @@ # -------------------------------------------------------------------------- try: + from ._models_py3 import AccountKeyDatastoreCredentials + from ._models_py3 import AccountKeyDatastoreSecrets from ._models_py3 import AKS from ._models_py3 import AksComputeSecrets from ._models_py3 import AksNetworkingConfiguration from ._models_py3 import AKSProperties from ._models_py3 import AmlCompute from ._models_py3 import AmlComputeNodeInformation - from ._models_py3 import AmlComputeNodesInformation from ._models_py3 import AmlComputeProperties + from ._models_py3 import AmlToken + from ._models_py3 import AmlUserFeature + from ._models_py3 import AssetReferenceBase + from ._models_py3 import AssignedUser + from ._models_py3 import AutoPauseProperties + from ._models_py3 import AutoScaleProperties + from ._models_py3 import AutoScaleSettings + from ._models_py3 import AzureBlobContents + from ._models_py3 import AzureDataLakeGen1Contents + from ._models_py3 import AzureDataLakeGen2Contents + from ._models_py3 import AzureEntityResource + from ._models_py3 import AzureFileContents + from ._models_py3 import AzurePostgreSqlContents + from ._models_py3 import AzureSqlDatabaseContents + from ._models_py3 import BanditPolicy 
+ from ._models_py3 import BatchDeployment + from ._models_py3 import BatchDeploymentTrackedResource + from ._models_py3 import BatchEndpoint + from ._models_py3 import BatchEndpointTrackedResource + from ._models_py3 import BatchOutputConfiguration + from ._models_py3 import BatchRetrySettings + from ._models_py3 import CertificateDatastoreCredentials + from ._models_py3 import CertificateDatastoreSecrets from ._models_py3 import ClusterUpdateParameters + from ._models_py3 import CocoExportSummary + from ._models_py3 import CodeConfiguration + from ._models_py3 import CodeContainer + from ._models_py3 import CodeContainerResource + from ._models_py3 import CodeVersion + from ._models_py3 import CodeVersionResource + from ._models_py3 import CommandJob from ._models_py3 import Compute + from ._models_py3 import ComputeConfiguration + from ._models_py3 import ComputeInstance + from ._models_py3 import ComputeInstanceApplication + from ._models_py3 import ComputeInstanceConnectivityEndpoints + from ._models_py3 import ComputeInstanceCreatedBy + from ._models_py3 import ComputeInstanceLastOperation + from ._models_py3 import ComputeInstanceProperties + from ._models_py3 import ComputeInstanceSshSettings from ._models_py3 import ComputeNodesInformation from ._models_py3 import ComputeResource + from ._models_py3 import ComputeSchedules from ._models_py3 import ComputeSecrets + from ._models_py3 import ComputeStartStopSchedule + from ._models_py3 import ContainerResourceRequirements + from ._models_py3 import CosmosDbSettings + from ._models_py3 import Cron + from ._models_py3 import CsvExportSummary from ._models_py3 import Databricks from ._models_py3 import DatabricksComputeSecrets from ._models_py3 import DatabricksProperties + from ._models_py3 import DataContainer + from ._models_py3 import DataContainerResource from ._models_py3 import DataFactory from ._models_py3 import DataLakeAnalytics from ._models_py3 import DataLakeAnalyticsProperties + from ._models_py3 
import DataPathAssetReference + from ._models_py3 import DatasetExportSummary + from ._models_py3 import DatastoreContents + from ._models_py3 import DatastoreCredentials + from ._models_py3 import DatastoreProperties + from ._models_py3 import DatastorePropertiesResource + from ._models_py3 import DatastoreSecrets + from ._models_py3 import DataVersion + from ._models_py3 import DataVersionResource + from ._models_py3 import DeploymentLogs + from ._models_py3 import DeploymentLogsRequest + from ._models_py3 import DistributionConfiguration + from ._models_py3 import DockerBuild + from ._models_py3 import DockerImage + from ._models_py3 import DockerImagePlatform + from ._models_py3 import DockerSpecification + from ._models_py3 import EarlyTerminationPolicy + from ._models_py3 import EncryptionProperty + from ._models_py3 import EndpointAuthKeys + from ._models_py3 import EndpointAuthToken + from ._models_py3 import EnvironmentContainer + from ._models_py3 import EnvironmentContainerResource + from ._models_py3 import EnvironmentSpecificationVersion + from ._models_py3 import EnvironmentSpecificationVersionResource + from ._models_py3 import ErrorAdditionalInfo from ._models_py3 import ErrorDetail - from ._models_py3 import ErrorResponse + from ._models_py3 import ErrorResponse, ErrorResponseException + from ._models_py3 import EstimatedVMPrice + from ._models_py3 import EstimatedVMPrices + from ._models_py3 import ExportSummary + from ._models_py3 import FlavorData + from ._models_py3 import GlusterFsContents from ._models_py3 import HDInsight from ._models_py3 import HDInsightProperties + from ._models_py3 import IdAssetReference from ._models_py3 import Identity + from ._models_py3 import IdentityConfiguration + from ._models_py3 import IdentityForCmk + from ._models_py3 import InferenceContainerProperties + from ._models_py3 import InputDataBinding + from ._models_py3 import JobBase + from ._models_py3 import JobBaseResource + from ._models_py3 import 
JobEndpoint + from ._models_py3 import JobOutput + from ._models_py3 import K8sOnlineDeployment + from ._models_py3 import KeyVaultProperties + from ._models_py3 import LabelCategory + from ._models_py3 import LabelClass + from ._models_py3 import LabelingDatasetConfiguration + from ._models_py3 import LabelingJob + from ._models_py3 import LabelingJobImageProperties + from ._models_py3 import LabelingJobInstructions + from ._models_py3 import LabelingJobMediaProperties + from ._models_py3 import LabelingJobResource + from ._models_py3 import LabelingJobTextProperties + from ._models_py3 import LinkedInfo + from ._models_py3 import ListNotebookKeysResult + from ._models_py3 import ListStorageAccountKeysResult from ._models_py3 import ListWorkspaceKeysResult - from ._models_py3 import MachineLearningServiceError, MachineLearningServiceErrorException + from ._models_py3 import ManagedIdentity + from ._models_py3 import ManagedOnlineDeployment + from ._models_py3 import ManualScaleSettings + from ._models_py3 import MedianStoppingPolicy + from ._models_py3 import MLAssistConfiguration + from ._models_py3 import ModelContainer + from ._models_py3 import ModelContainerResource + from ._models_py3 import ModelVersion + from ._models_py3 import ModelVersionResource + from ._models_py3 import Mpi from ._models_py3 import NodeStateCounts + from ._models_py3 import NoneDatastoreCredentials + from ._models_py3 import NoneDatastoreSecrets + from ._models_py3 import NotebookAccessTokenResult + from ._models_py3 import NotebookPreparationError + from ._models_py3 import NotebookResourceInfo + from ._models_py3 import Objective + from ._models_py3 import OnlineDeployment + from ._models_py3 import OnlineDeploymentTrackedResource + from ._models_py3 import OnlineEndpoint + from ._models_py3 import OnlineEndpointTrackedResource + from ._models_py3 import OnlineRequestSettings + from ._models_py3 import OnlineScaleSettings from ._models_py3 import Operation from ._models_py3 import 
OperationDisplay + from ._models_py3 import OutputDataBinding + from ._models_py3 import OutputPathAssetReference + from ._models_py3 import PartialAksOnlineDeployment + from ._models_py3 import PartialBatchDeployment + from ._models_py3 import PartialBatchDeploymentPartialTrackedResource + from ._models_py3 import PartialBatchEndpoint + from ._models_py3 import PartialBatchEndpointPartialTrackedResource + from ._models_py3 import PartialManagedOnlineDeployment + from ._models_py3 import PartialOnlineDeployment + from ._models_py3 import PartialOnlineDeploymentPartialTrackedResource + from ._models_py3 import PartialOnlineEndpoint + from ._models_py3 import PartialOnlineEndpointPartialTrackedResource from ._models_py3 import Password + from ._models_py3 import PersonalComputeInstanceSettings + from ._models_py3 import PrivateEndpoint + from ._models_py3 import PrivateEndpointConnection + from ._models_py3 import PrivateLinkResource + from ._models_py3 import PrivateLinkResourceListResult + from ._models_py3 import PrivateLinkServiceConnectionState + from ._models_py3 import ProbeSettings + from ._models_py3 import ProgressMetrics + from ._models_py3 import ProxyResource + from ._models_py3 import PyTorch + from ._models_py3 import QuotaBaseProperties + from ._models_py3 import QuotaUpdateParameters + from ._models_py3 import Recurrence + from ._models_py3 import RecurrenceSchedule + from ._models_py3 import RegenerateEndpointKeysRequest from ._models_py3 import RegistryListCredentialsResult from ._models_py3 import Resource from ._models_py3 import ResourceId + from ._models_py3 import ResourceIdentity + from ._models_py3 import ResourceName + from ._models_py3 import ResourceQuota + from ._models_py3 import ResourceSkuLocationInfo + from ._models_py3 import ResourceSkuZoneDetails + from ._models_py3 import Restriction + from ._models_py3 import Route + from ._models_py3 import SasDatastoreCredentials + from ._models_py3 import SasDatastoreSecrets from ._models_py3 
import ScaleSettings + from ._models_py3 import ScriptReference + from ._models_py3 import ScriptsToExecute + from ._models_py3 import ServiceManagedResourcesSettings from ._models_py3 import ServicePrincipalCredentials + from ._models_py3 import ServicePrincipalDatastoreCredentials + from ._models_py3 import ServicePrincipalDatastoreSecrets + from ._models_py3 import SetupScripts + from ._models_py3 import SharedPrivateLinkResource + from ._models_py3 import Sku + from ._models_py3 import SKUCapability + from ._models_py3 import SqlAdminDatastoreCredentials + from ._models_py3 import SqlAdminDatastoreSecrets from ._models_py3 import SslConfiguration + from ._models_py3 import StatusMessage + from ._models_py3 import SweepJob + from ._models_py3 import SynapseSpark + from ._models_py3 import SynapseSparkPoolProperties + from ._models_py3 import SynapseSparkProperties + from ._models_py3 import SystemData from ._models_py3 import SystemService + from ._models_py3 import TensorFlow + from ._models_py3 import TrackedResource + from ._models_py3 import TrialComponent + from ._models_py3 import TruncationSelectionPolicy + from ._models_py3 import UpdateWorkspaceQuotas + from ._models_py3 import UpdateWorkspaceQuotasResult from ._models_py3 import Usage from ._models_py3 import UsageName from ._models_py3 import UserAccountCredentials + from ._models_py3 import UserAssignedIdentity + from ._models_py3 import UserAssignedIdentityMeta from ._models_py3 import VirtualMachine + from ._models_py3 import VirtualMachineImage from ._models_py3 import VirtualMachineProperties from ._models_py3 import VirtualMachineSecrets from ._models_py3 import VirtualMachineSize from ._models_py3 import VirtualMachineSizeListResult from ._models_py3 import VirtualMachineSshCredentials from ._models_py3 import Workspace + from ._models_py3 import WorkspaceConnection + from ._models_py3 import WorkspaceSku from ._models_py3 import WorkspaceUpdateParameters except (SyntaxError, ImportError): + 
from ._models import AccountKeyDatastoreCredentials + from ._models import AccountKeyDatastoreSecrets from ._models import AKS from ._models import AksComputeSecrets from ._models import AksNetworkingConfiguration from ._models import AKSProperties from ._models import AmlCompute from ._models import AmlComputeNodeInformation - from ._models import AmlComputeNodesInformation from ._models import AmlComputeProperties + from ._models import AmlToken + from ._models import AmlUserFeature + from ._models import AssetReferenceBase + from ._models import AssignedUser + from ._models import AutoPauseProperties + from ._models import AutoScaleProperties + from ._models import AutoScaleSettings + from ._models import AzureBlobContents + from ._models import AzureDataLakeGen1Contents + from ._models import AzureDataLakeGen2Contents + from ._models import AzureEntityResource + from ._models import AzureFileContents + from ._models import AzurePostgreSqlContents + from ._models import AzureSqlDatabaseContents + from ._models import BanditPolicy + from ._models import BatchDeployment + from ._models import BatchDeploymentTrackedResource + from ._models import BatchEndpoint + from ._models import BatchEndpointTrackedResource + from ._models import BatchOutputConfiguration + from ._models import BatchRetrySettings + from ._models import CertificateDatastoreCredentials + from ._models import CertificateDatastoreSecrets from ._models import ClusterUpdateParameters + from ._models import CocoExportSummary + from ._models import CodeConfiguration + from ._models import CodeContainer + from ._models import CodeContainerResource + from ._models import CodeVersion + from ._models import CodeVersionResource + from ._models import CommandJob from ._models import Compute + from ._models import ComputeConfiguration + from ._models import ComputeInstance + from ._models import ComputeInstanceApplication + from ._models import ComputeInstanceConnectivityEndpoints + from ._models import 
ComputeInstanceCreatedBy + from ._models import ComputeInstanceLastOperation + from ._models import ComputeInstanceProperties + from ._models import ComputeInstanceSshSettings from ._models import ComputeNodesInformation from ._models import ComputeResource + from ._models import ComputeSchedules from ._models import ComputeSecrets + from ._models import ComputeStartStopSchedule + from ._models import ContainerResourceRequirements + from ._models import CosmosDbSettings + from ._models import Cron + from ._models import CsvExportSummary from ._models import Databricks from ._models import DatabricksComputeSecrets from ._models import DatabricksProperties + from ._models import DataContainer + from ._models import DataContainerResource from ._models import DataFactory from ._models import DataLakeAnalytics from ._models import DataLakeAnalyticsProperties + from ._models import DataPathAssetReference + from ._models import DatasetExportSummary + from ._models import DatastoreContents + from ._models import DatastoreCredentials + from ._models import DatastoreProperties + from ._models import DatastorePropertiesResource + from ._models import DatastoreSecrets + from ._models import DataVersion + from ._models import DataVersionResource + from ._models import DeploymentLogs + from ._models import DeploymentLogsRequest + from ._models import DistributionConfiguration + from ._models import DockerBuild + from ._models import DockerImage + from ._models import DockerImagePlatform + from ._models import DockerSpecification + from ._models import EarlyTerminationPolicy + from ._models import EncryptionProperty + from ._models import EndpointAuthKeys + from ._models import EndpointAuthToken + from ._models import EnvironmentContainer + from ._models import EnvironmentContainerResource + from ._models import EnvironmentSpecificationVersion + from ._models import EnvironmentSpecificationVersionResource + from ._models import ErrorAdditionalInfo from ._models import ErrorDetail 
- from ._models import ErrorResponse + from ._models import ErrorResponse, ErrorResponseException + from ._models import EstimatedVMPrice + from ._models import EstimatedVMPrices + from ._models import ExportSummary + from ._models import FlavorData + from ._models import GlusterFsContents from ._models import HDInsight from ._models import HDInsightProperties + from ._models import IdAssetReference from ._models import Identity + from ._models import IdentityConfiguration + from ._models import IdentityForCmk + from ._models import InferenceContainerProperties + from ._models import InputDataBinding + from ._models import JobBase + from ._models import JobBaseResource + from ._models import JobEndpoint + from ._models import JobOutput + from ._models import K8sOnlineDeployment + from ._models import KeyVaultProperties + from ._models import LabelCategory + from ._models import LabelClass + from ._models import LabelingDatasetConfiguration + from ._models import LabelingJob + from ._models import LabelingJobImageProperties + from ._models import LabelingJobInstructions + from ._models import LabelingJobMediaProperties + from ._models import LabelingJobResource + from ._models import LabelingJobTextProperties + from ._models import LinkedInfo + from ._models import ListNotebookKeysResult + from ._models import ListStorageAccountKeysResult from ._models import ListWorkspaceKeysResult - from ._models import MachineLearningServiceError, MachineLearningServiceErrorException + from ._models import ManagedIdentity + from ._models import ManagedOnlineDeployment + from ._models import ManualScaleSettings + from ._models import MedianStoppingPolicy + from ._models import MLAssistConfiguration + from ._models import ModelContainer + from ._models import ModelContainerResource + from ._models import ModelVersion + from ._models import ModelVersionResource + from ._models import Mpi from ._models import NodeStateCounts + from ._models import NoneDatastoreCredentials + from 
._models import NoneDatastoreSecrets + from ._models import NotebookAccessTokenResult + from ._models import NotebookPreparationError + from ._models import NotebookResourceInfo + from ._models import Objective + from ._models import OnlineDeployment + from ._models import OnlineDeploymentTrackedResource + from ._models import OnlineEndpoint + from ._models import OnlineEndpointTrackedResource + from ._models import OnlineRequestSettings + from ._models import OnlineScaleSettings from ._models import Operation from ._models import OperationDisplay + from ._models import OutputDataBinding + from ._models import OutputPathAssetReference + from ._models import PartialAksOnlineDeployment + from ._models import PartialBatchDeployment + from ._models import PartialBatchDeploymentPartialTrackedResource + from ._models import PartialBatchEndpoint + from ._models import PartialBatchEndpointPartialTrackedResource + from ._models import PartialManagedOnlineDeployment + from ._models import PartialOnlineDeployment + from ._models import PartialOnlineDeploymentPartialTrackedResource + from ._models import PartialOnlineEndpoint + from ._models import PartialOnlineEndpointPartialTrackedResource from ._models import Password + from ._models import PersonalComputeInstanceSettings + from ._models import PrivateEndpoint + from ._models import PrivateEndpointConnection + from ._models import PrivateLinkResource + from ._models import PrivateLinkResourceListResult + from ._models import PrivateLinkServiceConnectionState + from ._models import ProbeSettings + from ._models import ProgressMetrics + from ._models import ProxyResource + from ._models import PyTorch + from ._models import QuotaBaseProperties + from ._models import QuotaUpdateParameters + from ._models import Recurrence + from ._models import RecurrenceSchedule + from ._models import RegenerateEndpointKeysRequest from ._models import RegistryListCredentialsResult from ._models import Resource from ._models import ResourceId 
+ from ._models import ResourceIdentity + from ._models import ResourceName + from ._models import ResourceQuota + from ._models import ResourceSkuLocationInfo + from ._models import ResourceSkuZoneDetails + from ._models import Restriction + from ._models import Route + from ._models import SasDatastoreCredentials + from ._models import SasDatastoreSecrets from ._models import ScaleSettings + from ._models import ScriptReference + from ._models import ScriptsToExecute + from ._models import ServiceManagedResourcesSettings from ._models import ServicePrincipalCredentials + from ._models import ServicePrincipalDatastoreCredentials + from ._models import ServicePrincipalDatastoreSecrets + from ._models import SetupScripts + from ._models import SharedPrivateLinkResource + from ._models import Sku + from ._models import SKUCapability + from ._models import SqlAdminDatastoreCredentials + from ._models import SqlAdminDatastoreSecrets from ._models import SslConfiguration + from ._models import StatusMessage + from ._models import SweepJob + from ._models import SynapseSpark + from ._models import SynapseSparkPoolProperties + from ._models import SynapseSparkProperties + from ._models import SystemData from ._models import SystemService + from ._models import TensorFlow + from ._models import TrackedResource + from ._models import TrialComponent + from ._models import TruncationSelectionPolicy + from ._models import UpdateWorkspaceQuotas + from ._models import UpdateWorkspaceQuotasResult from ._models import Usage from ._models import UsageName from ._models import UserAccountCredentials + from ._models import UserAssignedIdentity + from ._models import UserAssignedIdentityMeta from ._models import VirtualMachine + from ._models import VirtualMachineImage from ._models import VirtualMachineProperties from ._models import VirtualMachineSecrets from ._models import VirtualMachineSize from ._models import VirtualMachineSizeListResult from ._models import 
VirtualMachineSshCredentials from ._models import Workspace + from ._models import WorkspaceConnection + from ._models import WorkspaceSku from ._models import WorkspaceUpdateParameters +from ._paged_models import AmlComputeNodeInformationPaged +from ._paged_models import AmlUserFeaturePaged +from ._paged_models import BatchDeploymentTrackedResourcePaged +from ._paged_models import BatchEndpointTrackedResourcePaged +from ._paged_models import CodeContainerResourcePaged +from ._paged_models import CodeVersionResourcePaged from ._paged_models import ComputeResourcePaged +from ._paged_models import DataContainerResourcePaged +from ._paged_models import DatastorePropertiesResourcePaged +from ._paged_models import DataVersionResourcePaged +from ._paged_models import EnvironmentContainerResourcePaged +from ._paged_models import EnvironmentSpecificationVersionResourcePaged +from ._paged_models import JobBaseResourcePaged +from ._paged_models import LabelingJobResourcePaged +from ._paged_models import ModelContainerResourcePaged +from ._paged_models import ModelVersionResourcePaged +from ._paged_models import OnlineDeploymentTrackedResourcePaged +from ._paged_models import OnlineEndpointTrackedResourcePaged from ._paged_models import OperationPaged +from ._paged_models import PrivateEndpointConnectionPaged +from ._paged_models import ResourceQuotaPaged from ._paged_models import UsagePaged +from ._paged_models import WorkspaceConnectionPaged from ._paged_models import WorkspacePaged +from ._paged_models import WorkspaceSkuPaged from ._azure_machine_learning_workspaces_enums import ( ProvisioningState, - UsageUnit, + EncryptionStatus, + PrivateEndpointServiceConnectionStatus, + PrivateEndpointConnectionProvisioningState, ResourceIdentityType, + CreatedByType, + UsageUnit, + VMPriceOSType, + VMTier, + QuotaUnit, + Status, + ClusterPurpose, + LoadBalancerType, + OsType, VmPriority, + RemoteLoginPortPublicAccess, AllocationState, + ApplicationSharingPolicy, + SshPublicAccess, 
+ ComputeInstanceState, + ComputeInstanceAuthorizationType, + OperationName, + OperationStatus, + ProvisioningStatus, + ScheduleStatus, + TriggerType, + ComputePowerAction, + RecurrenceFrequency, + DaysOfWeek, ComputeType, + NodeState, + ValueFormat, + ScheduleType, + BatchLoggingLevel, + BatchOutputAction, + ResourceIdentityAssignment, + EndpointAuthMode, + DataBindingMode, + JobStatus, + ContainerType, + ContentsType, + CredentialsType, + DatasetType, + OriginType, + DeploymentProvisioningState, + DistributionType, + OperatingSystemType, + DockerSpecificationType, + EarlyTerminationPolicyType, + EndpointComputeType, + EndpointProvisioningState, + EnvironmentSpecificationType, + ExportFormatType, + Goal, + IdentityConfigurationType, + ImageAnnotationType, + JobProvisioningState, + JobType, + KeyType, + StatusMessageLevel, + TextAnnotationType, + MediaType, + OrderString, + ReferenceType, + SamplingAlgorithm, + ScaleType, + SecretsType, + ReasonCode, UnderlyingResourceAction, ) __all__ = [ + 'AccountKeyDatastoreCredentials', + 'AccountKeyDatastoreSecrets', 'AKS', 'AksComputeSecrets', 'AksNetworkingConfiguration', 'AKSProperties', 'AmlCompute', 'AmlComputeNodeInformation', - 'AmlComputeNodesInformation', 'AmlComputeProperties', + 'AmlToken', + 'AmlUserFeature', + 'AssetReferenceBase', + 'AssignedUser', + 'AutoPauseProperties', + 'AutoScaleProperties', + 'AutoScaleSettings', + 'AzureBlobContents', + 'AzureDataLakeGen1Contents', + 'AzureDataLakeGen2Contents', + 'AzureEntityResource', + 'AzureFileContents', + 'AzurePostgreSqlContents', + 'AzureSqlDatabaseContents', + 'BanditPolicy', + 'BatchDeployment', + 'BatchDeploymentTrackedResource', + 'BatchEndpoint', + 'BatchEndpointTrackedResource', + 'BatchOutputConfiguration', + 'BatchRetrySettings', + 'CertificateDatastoreCredentials', + 'CertificateDatastoreSecrets', 'ClusterUpdateParameters', + 'CocoExportSummary', + 'CodeConfiguration', + 'CodeContainer', + 'CodeContainerResource', + 'CodeVersion', + 
'CodeVersionResource', + 'CommandJob', 'Compute', + 'ComputeConfiguration', + 'ComputeInstance', + 'ComputeInstanceApplication', + 'ComputeInstanceConnectivityEndpoints', + 'ComputeInstanceCreatedBy', + 'ComputeInstanceLastOperation', + 'ComputeInstanceProperties', + 'ComputeInstanceSshSettings', 'ComputeNodesInformation', 'ComputeResource', + 'ComputeSchedules', 'ComputeSecrets', + 'ComputeStartStopSchedule', + 'ContainerResourceRequirements', + 'CosmosDbSettings', + 'Cron', + 'CsvExportSummary', 'Databricks', 'DatabricksComputeSecrets', 'DatabricksProperties', + 'DataContainer', + 'DataContainerResource', 'DataFactory', 'DataLakeAnalytics', 'DataLakeAnalyticsProperties', + 'DataPathAssetReference', + 'DatasetExportSummary', + 'DatastoreContents', + 'DatastoreCredentials', + 'DatastoreProperties', + 'DatastorePropertiesResource', + 'DatastoreSecrets', + 'DataVersion', + 'DataVersionResource', + 'DeploymentLogs', + 'DeploymentLogsRequest', + 'DistributionConfiguration', + 'DockerBuild', + 'DockerImage', + 'DockerImagePlatform', + 'DockerSpecification', + 'EarlyTerminationPolicy', + 'EncryptionProperty', + 'EndpointAuthKeys', + 'EndpointAuthToken', + 'EnvironmentContainer', + 'EnvironmentContainerResource', + 'EnvironmentSpecificationVersion', + 'EnvironmentSpecificationVersionResource', + 'ErrorAdditionalInfo', 'ErrorDetail', - 'ErrorResponse', + 'ErrorResponse', 'ErrorResponseException', + 'EstimatedVMPrice', + 'EstimatedVMPrices', + 'ExportSummary', + 'FlavorData', + 'GlusterFsContents', 'HDInsight', 'HDInsightProperties', + 'IdAssetReference', 'Identity', + 'IdentityConfiguration', + 'IdentityForCmk', + 'InferenceContainerProperties', + 'InputDataBinding', + 'JobBase', + 'JobBaseResource', + 'JobEndpoint', + 'JobOutput', + 'K8sOnlineDeployment', + 'KeyVaultProperties', + 'LabelCategory', + 'LabelClass', + 'LabelingDatasetConfiguration', + 'LabelingJob', + 'LabelingJobImageProperties', + 'LabelingJobInstructions', + 'LabelingJobMediaProperties', + 
'LabelingJobResource', + 'LabelingJobTextProperties', + 'LinkedInfo', + 'ListNotebookKeysResult', + 'ListStorageAccountKeysResult', 'ListWorkspaceKeysResult', - 'MachineLearningServiceError', 'MachineLearningServiceErrorException', + 'ManagedIdentity', + 'ManagedOnlineDeployment', + 'ManualScaleSettings', + 'MedianStoppingPolicy', + 'MLAssistConfiguration', + 'ModelContainer', + 'ModelContainerResource', + 'ModelVersion', + 'ModelVersionResource', + 'Mpi', 'NodeStateCounts', + 'NoneDatastoreCredentials', + 'NoneDatastoreSecrets', + 'NotebookAccessTokenResult', + 'NotebookPreparationError', + 'NotebookResourceInfo', + 'Objective', + 'OnlineDeployment', + 'OnlineDeploymentTrackedResource', + 'OnlineEndpoint', + 'OnlineEndpointTrackedResource', + 'OnlineRequestSettings', + 'OnlineScaleSettings', 'Operation', 'OperationDisplay', + 'OutputDataBinding', + 'OutputPathAssetReference', + 'PartialAksOnlineDeployment', + 'PartialBatchDeployment', + 'PartialBatchDeploymentPartialTrackedResource', + 'PartialBatchEndpoint', + 'PartialBatchEndpointPartialTrackedResource', + 'PartialManagedOnlineDeployment', + 'PartialOnlineDeployment', + 'PartialOnlineDeploymentPartialTrackedResource', + 'PartialOnlineEndpoint', + 'PartialOnlineEndpointPartialTrackedResource', 'Password', + 'PersonalComputeInstanceSettings', + 'PrivateEndpoint', + 'PrivateEndpointConnection', + 'PrivateLinkResource', + 'PrivateLinkResourceListResult', + 'PrivateLinkServiceConnectionState', + 'ProbeSettings', + 'ProgressMetrics', + 'ProxyResource', + 'PyTorch', + 'QuotaBaseProperties', + 'QuotaUpdateParameters', + 'Recurrence', + 'RecurrenceSchedule', + 'RegenerateEndpointKeysRequest', 'RegistryListCredentialsResult', 'Resource', 'ResourceId', + 'ResourceIdentity', + 'ResourceName', + 'ResourceQuota', + 'ResourceSkuLocationInfo', + 'ResourceSkuZoneDetails', + 'Restriction', + 'Route', + 'SasDatastoreCredentials', + 'SasDatastoreSecrets', 'ScaleSettings', + 'ScriptReference', + 'ScriptsToExecute', + 
'ServiceManagedResourcesSettings', 'ServicePrincipalCredentials', + 'ServicePrincipalDatastoreCredentials', + 'ServicePrincipalDatastoreSecrets', + 'SetupScripts', + 'SharedPrivateLinkResource', + 'Sku', + 'SKUCapability', + 'SqlAdminDatastoreCredentials', + 'SqlAdminDatastoreSecrets', 'SslConfiguration', + 'StatusMessage', + 'SweepJob', + 'SynapseSpark', + 'SynapseSparkPoolProperties', + 'SynapseSparkProperties', + 'SystemData', 'SystemService', + 'TensorFlow', + 'TrackedResource', + 'TrialComponent', + 'TruncationSelectionPolicy', + 'UpdateWorkspaceQuotas', + 'UpdateWorkspaceQuotasResult', 'Usage', 'UsageName', 'UserAccountCredentials', + 'UserAssignedIdentity', + 'UserAssignedIdentityMeta', 'VirtualMachine', + 'VirtualMachineImage', 'VirtualMachineProperties', 'VirtualMachineSecrets', 'VirtualMachineSize', 'VirtualMachineSizeListResult', 'VirtualMachineSshCredentials', 'Workspace', + 'WorkspaceConnection', + 'WorkspaceSku', 'WorkspaceUpdateParameters', 'OperationPaged', 'WorkspacePaged', 'UsagePaged', + 'ResourceQuotaPaged', 'ComputeResourcePaged', + 'AmlComputeNodeInformationPaged', + 'PrivateEndpointConnectionPaged', + 'WorkspaceConnectionPaged', + 'BatchEndpointTrackedResourcePaged', + 'BatchDeploymentTrackedResourcePaged', + 'CodeContainerResourcePaged', + 'CodeVersionResourcePaged', + 'DataContainerResourcePaged', + 'DataVersionResourcePaged', + 'DatastorePropertiesResourcePaged', + 'EnvironmentContainerResourcePaged', + 'EnvironmentSpecificationVersionResourcePaged', + 'JobBaseResourcePaged', + 'LabelingJobResourcePaged', + 'ModelContainerResourcePaged', + 'ModelVersionResourcePaged', + 'OnlineEndpointTrackedResourcePaged', + 'OnlineDeploymentTrackedResourcePaged', + 'AmlUserFeaturePaged', + 'WorkspaceSkuPaged', 'ProvisioningState', - 'UsageUnit', + 'EncryptionStatus', + 'PrivateEndpointServiceConnectionStatus', + 'PrivateEndpointConnectionProvisioningState', 'ResourceIdentityType', + 'CreatedByType', + 'UsageUnit', + 'VMPriceOSType', + 'VMTier', + 
'QuotaUnit', + 'Status', + 'ClusterPurpose', + 'LoadBalancerType', + 'OsType', 'VmPriority', + 'RemoteLoginPortPublicAccess', 'AllocationState', + 'ApplicationSharingPolicy', + 'SshPublicAccess', + 'ComputeInstanceState', + 'ComputeInstanceAuthorizationType', + 'OperationName', + 'OperationStatus', + 'ProvisioningStatus', + 'ScheduleStatus', + 'TriggerType', + 'ComputePowerAction', + 'RecurrenceFrequency', + 'DaysOfWeek', 'ComputeType', + 'NodeState', + 'ValueFormat', + 'ScheduleType', + 'BatchLoggingLevel', + 'BatchOutputAction', + 'ResourceIdentityAssignment', + 'EndpointAuthMode', + 'DataBindingMode', + 'JobStatus', + 'ContainerType', + 'ContentsType', + 'CredentialsType', + 'DatasetType', + 'OriginType', + 'DeploymentProvisioningState', + 'DistributionType', + 'OperatingSystemType', + 'DockerSpecificationType', + 'EarlyTerminationPolicyType', + 'EndpointComputeType', + 'EndpointProvisioningState', + 'EnvironmentSpecificationType', + 'ExportFormatType', + 'Goal', + 'IdentityConfigurationType', + 'ImageAnnotationType', + 'JobProvisioningState', + 'JobType', + 'KeyType', + 'StatusMessageLevel', + 'TextAnnotationType', + 'MediaType', + 'OrderString', + 'ReferenceType', + 'SamplingAlgorithm', + 'ScaleType', + 'SecretsType', + 'ReasonCode', 'UnderlyingResourceAction', ] diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py index 0e95e8e3d7ff..f85260de4694 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py @@ -23,14 +23,97 @@ class ProvisioningState(str, Enum): canceled = "Canceled" 
-class UsageUnit(str, Enum): +class EncryptionStatus(str, Enum): - count = "Count" + enabled = "Enabled" + disabled = "Disabled" + + +class PrivateEndpointServiceConnectionStatus(str, Enum): + + pending = "Pending" + approved = "Approved" + rejected = "Rejected" + disconnected = "Disconnected" + timeout = "Timeout" + + +class PrivateEndpointConnectionProvisioningState(str, Enum): + + succeeded = "Succeeded" + creating = "Creating" + deleting = "Deleting" + failed = "Failed" class ResourceIdentityType(str, Enum): system_assigned = "SystemAssigned" + system_assigned_user_assigned = "SystemAssigned,UserAssigned" + user_assigned = "UserAssigned" + none = "None" + + +class CreatedByType(str, Enum): + + user = "User" + application = "Application" + managed_identity = "ManagedIdentity" + key = "Key" + + +class UsageUnit(str, Enum): + + count = "Count" + + +class VMPriceOSType(str, Enum): + + linux = "Linux" + windows = "Windows" + + +class VMTier(str, Enum): + + standard = "Standard" + low_priority = "LowPriority" + spot = "Spot" + + +class QuotaUnit(str, Enum): + + count = "Count" + + +class Status(str, Enum): + + undefined = "Undefined" + success = "Success" + failure = "Failure" + invalid_quota_below_cluster_minimum = "InvalidQuotaBelowClusterMinimum" + invalid_quota_exceeds_subscription_limit = "InvalidQuotaExceedsSubscriptionLimit" + invalid_vm_family_name = "InvalidVMFamilyName" + operation_not_supported_for_sku = "OperationNotSupportedForSku" + operation_not_enabled_for_region = "OperationNotEnabledForRegion" + + +class ClusterPurpose(str, Enum): + + fast_prod = "FastProd" + dense_prod = "DenseProd" + dev_test = "DevTest" + + +class LoadBalancerType(str, Enum): + + public_ip = "PublicIp" + internal_load_balancer = "InternalLoadBalancer" + + +class OsType(str, Enum): + + linux = "Linux" + windows = "Windows" class VmPriority(str, Enum): @@ -39,21 +122,418 @@ class VmPriority(str, Enum): low_priority = "LowPriority" +class RemoteLoginPortPublicAccess(str, Enum): + + 
enabled = "Enabled" + disabled = "Disabled" + not_specified = "NotSpecified" + + class AllocationState(str, Enum): steady = "Steady" resizing = "Resizing" +class ApplicationSharingPolicy(str, Enum): + + personal = "Personal" + shared = "Shared" + + +class SshPublicAccess(str, Enum): + + enabled = "Enabled" + disabled = "Disabled" + + +class ComputeInstanceState(str, Enum): + + creating = "Creating" + create_failed = "CreateFailed" + deleting = "Deleting" + running = "Running" + restarting = "Restarting" + job_running = "JobRunning" + setting_up = "SettingUp" + setup_failed = "SetupFailed" + starting = "Starting" + stopped = "Stopped" + stopping = "Stopping" + user_setting_up = "UserSettingUp" + user_setup_failed = "UserSetupFailed" + unknown = "Unknown" + unusable = "Unusable" + + +class ComputeInstanceAuthorizationType(str, Enum): + + personal = "personal" + + +class OperationName(str, Enum): + + create = "Create" + start = "Start" + stop = "Stop" + restart = "Restart" + reimage = "Reimage" + delete = "Delete" + + +class OperationStatus(str, Enum): + + in_progress = "InProgress" + succeeded = "Succeeded" + create_failed = "CreateFailed" + start_failed = "StartFailed" + stop_failed = "StopFailed" + restart_failed = "RestartFailed" + reimage_failed = "ReimageFailed" + delete_failed = "DeleteFailed" + + +class ProvisioningStatus(str, Enum): + + completed = "Completed" + provisioning = "Provisioning" + failed = "Failed" + + +class ScheduleStatus(str, Enum): + + enabled = "Enabled" + disabled = "Disabled" + + +class TriggerType(str, Enum): + + recurrence = "Recurrence" + cron = "Cron" + + +class ComputePowerAction(str, Enum): + + start = "Start" + stop = "Stop" + + +class RecurrenceFrequency(str, Enum): + + not_specified = "NotSpecified" + second = "Second" + minute = "Minute" + hour = "Hour" + day = "Day" + week = "Week" + month = "Month" + year = "Year" + + +class DaysOfWeek(str, Enum): + + sunday = "Sunday" + monday = "Monday" + tuesday = "Tuesday" + wednesday = 
"Wednesday" + thursday = "Thursday" + friday = "Friday" + saturday = "Saturday" + + class ComputeType(str, Enum): aks = "AKS" aml_compute = "AmlCompute" + compute_instance = "ComputeInstance" data_factory = "DataFactory" virtual_machine = "VirtualMachine" hd_insight = "HDInsight" databricks = "Databricks" data_lake_analytics = "DataLakeAnalytics" + synapse_spark = "SynapseSpark" + + +class NodeState(str, Enum): + + idle = "idle" + running = "running" + preparing = "preparing" + unusable = "unusable" + leaving = "leaving" + preempted = "preempted" + + +class ValueFormat(str, Enum): + + json = "JSON" + + +class ScheduleType(str, Enum): + + compute_start_stop = "ComputeStartStop" + + +class BatchLoggingLevel(str, Enum): + + info = "Info" + warning = "Warning" + debug = "Debug" + + +class BatchOutputAction(str, Enum): + + summary_only = "SummaryOnly" + append_row = "AppendRow" + + +class ResourceIdentityAssignment(str, Enum): + + system_assigned = "SystemAssigned" + user_assigned = "UserAssigned" + system_assigned_user_assigned = "SystemAssigned,UserAssigned" + none = "None" + + +class EndpointAuthMode(str, Enum): + + aml_token = "AMLToken" + key = "Key" + aad_token = "AADToken" + + +class DataBindingMode(str, Enum): + + mount = "Mount" + download = "Download" + upload = "Upload" + + +class JobStatus(str, Enum): + + not_started = "NotStarted" + starting = "Starting" + provisioning = "Provisioning" + preparing = "Preparing" + queued = "Queued" + running = "Running" + finalizing = "Finalizing" + cancel_requested = "CancelRequested" + completed = "Completed" + failed = "Failed" + canceled = "Canceled" + not_responding = "NotResponding" + paused = "Paused" + unknown = "Unknown" + + +class ContainerType(str, Enum): + + storage_initializer = "StorageInitializer" + inference_server = "InferenceServer" + + +class ContentsType(str, Enum): + + azure_blob = "AzureBlob" + azure_data_lake_gen1 = "AzureDataLakeGen1" + azure_data_lake_gen2 = "AzureDataLakeGen2" + azure_file = 
"AzureFile" + azure_my_sql = "AzureMySql" + azure_postgre_sql = "AzurePostgreSql" + azure_sql_database = "AzureSqlDatabase" + gluster_fs = "GlusterFs" + + +class CredentialsType(str, Enum): + + account_key = "AccountKey" + certificate = "Certificate" + none = "None" + sas = "Sas" + service_principal = "ServicePrincipal" + sql_admin = "SqlAdmin" + + +class DatasetType(str, Enum): + + simple = "Simple" + dataflow = "Dataflow" + + +class OriginType(str, Enum): + + synapse = "Synapse" + + +class DeploymentProvisioningState(str, Enum): + + creating = "Creating" + deleting = "Deleting" + scaling = "Scaling" + updating = "Updating" + succeeded = "Succeeded" + failed = "Failed" + canceled = "Canceled" + + +class DistributionType(str, Enum): + + py_torch = "PyTorch" + tensor_flow = "TensorFlow" + mpi = "Mpi" + + +class OperatingSystemType(str, Enum): + + linux = "Linux" + windows = "Windows" + + +class DockerSpecificationType(str, Enum): + + build = "Build" + image = "Image" + + +class EarlyTerminationPolicyType(str, Enum): + + bandit = "Bandit" + median_stopping = "MedianStopping" + truncation_selection = "TruncationSelection" + + +class EndpointComputeType(str, Enum): + + managed = "Managed" + k8_s = "K8S" + azure_ml_compute = "AzureMLCompute" + + +class EndpointProvisioningState(str, Enum): + + creating = "Creating" + deleting = "Deleting" + succeeded = "Succeeded" + failed = "Failed" + updating = "Updating" + canceled = "Canceled" + + +class EnvironmentSpecificationType(str, Enum): + + curated = "Curated" + user_created = "UserCreated" + + +class ExportFormatType(str, Enum): + + dataset = "Dataset" + coco = "Coco" + csv = "CSV" + + +class Goal(str, Enum): + + minimize = "Minimize" + maximize = "Maximize" + + +class IdentityConfigurationType(str, Enum): + + managed = "Managed" + aml_token = "AMLToken" + + +class ImageAnnotationType(str, Enum): + + classification = "Classification" + bounding_box = "BoundingBox" + instance_segmentation = "InstanceSegmentation" + + +class 
JobProvisioningState(str, Enum): + + succeeded = "Succeeded" + failed = "Failed" + canceled = "Canceled" + in_progress = "InProgress" + + +class JobType(str, Enum): + + command = "Command" + sweep = "Sweep" + labeling = "Labeling" + + +class KeyType(str, Enum): + + primary = "Primary" + secondary = "Secondary" + + +class StatusMessageLevel(str, Enum): + + error = "Error" + information = "Information" + warning = "Warning" + + +class TextAnnotationType(str, Enum): + + classification = "Classification" + + +class MediaType(str, Enum): + + image = "Image" + text = "Text" + + +class OrderString(str, Enum): + + created_at_desc = "CreatedAtDesc" + created_at_asc = "CreatedAtAsc" + updated_at_desc = "UpdatedAtDesc" + updated_at_asc = "UpdatedAtAsc" + + +class ReferenceType(str, Enum): + + id = "Id" + data_path = "DataPath" + output_path = "OutputPath" + + +class SamplingAlgorithm(str, Enum): + + grid = "Grid" + random = "Random" + bayesian = "Bayesian" + + +class ScaleType(str, Enum): + + auto = "Auto" + manual = "Manual" + + +class SecretsType(str, Enum): + + account_key = "AccountKey" + certificate = "Certificate" + none = "None" + sas = "Sas" + service_principal = "ServicePrincipal" + sql_admin = "SqlAdmin" + + +class ReasonCode(str, Enum): + + not_specified = "NotSpecified" + not_available_for_region = "NotAvailableForRegion" + not_available_for_subscription = "NotAvailableForSubscription" class UnderlyingResourceAction(str, Enum): diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models.py index 8ded2b35dd56..70e87a966973 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models.py @@ -13,12 +13,128 @@ from msrest.exceptions 
import HttpOperationError +class DatastoreCredentials(Model): + """Base definition for datastore credentials. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AccountKeyDatastoreCredentials, + CertificateDatastoreCredentials, NoneDatastoreCredentials, + SasDatastoreCredentials, ServicePrincipalDatastoreCredentials, + SqlAdminDatastoreCredentials + + All required parameters must be populated in order to send to Azure. + + :param credentials_type: Required. Constant filled by server. + :type credentials_type: str + """ + + _validation = { + 'credentials_type': {'required': True}, + } + + _attribute_map = { + 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, + } + + _subtype_map = { + 'credentials_type': {'AccountKey': 'AccountKeyDatastoreCredentials', 'Certificate': 'CertificateDatastoreCredentials', 'None': 'NoneDatastoreCredentials', 'Sas': 'SasDatastoreCredentials', 'ServicePrincipal': 'ServicePrincipalDatastoreCredentials', 'SqlAdmin': 'SqlAdminDatastoreCredentials'} + } + + def __init__(self, **kwargs): + super(DatastoreCredentials, self).__init__(**kwargs) + self.credentials_type = None + + +class AccountKeyDatastoreCredentials(DatastoreCredentials): + """Account key datastore credentials configuration. + + All required parameters must be populated in order to send to Azure. + + :param credentials_type: Required. Constant filled by server. + :type credentials_type: str + :param secrets: Storage account secrets. 
+ :type secrets: + ~azure.mgmt.machinelearningservices.models.AccountKeyDatastoreSecrets + """ + + _validation = { + 'credentials_type': {'required': True}, + } + + _attribute_map = { + 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, + 'secrets': {'key': 'secrets', 'type': 'AccountKeyDatastoreSecrets'}, + } + + def __init__(self, **kwargs): + super(AccountKeyDatastoreCredentials, self).__init__(**kwargs) + self.secrets = kwargs.get('secrets', None) + self.credentials_type = 'AccountKey' + + +class DatastoreSecrets(Model): + """Base definition for datastore secrets. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AccountKeyDatastoreSecrets, CertificateDatastoreSecrets, + NoneDatastoreSecrets, SasDatastoreSecrets, + ServicePrincipalDatastoreSecrets, SqlAdminDatastoreSecrets + + All required parameters must be populated in order to send to Azure. + + :param secrets_type: Required. Constant filled by server. + :type secrets_type: str + """ + + _validation = { + 'secrets_type': {'required': True}, + } + + _attribute_map = { + 'secrets_type': {'key': 'secretsType', 'type': 'str'}, + } + + _subtype_map = { + 'secrets_type': {'AccountKey': 'AccountKeyDatastoreSecrets', 'Certificate': 'CertificateDatastoreSecrets', 'None': 'NoneDatastoreSecrets', 'Sas': 'SasDatastoreSecrets', 'ServicePrincipal': 'ServicePrincipalDatastoreSecrets', 'SqlAdmin': 'SqlAdminDatastoreSecrets'} + } + + def __init__(self, **kwargs): + super(DatastoreSecrets, self).__init__(**kwargs) + self.secrets_type = None + + +class AccountKeyDatastoreSecrets(DatastoreSecrets): + """Datastore account key secrets. + + All required parameters must be populated in order to send to Azure. + + :param secrets_type: Required. Constant filled by server. + :type secrets_type: str + :param key: Storage account key. 
+ :type key: str + """ + + _validation = { + 'secrets_type': {'required': True}, + } + + _attribute_map = { + 'secrets_type': {'key': 'secretsType', 'type': 'str'}, + 'key': {'key': 'key', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AccountKeyDatastoreSecrets, self).__init__(**kwargs) + self.key = kwargs.get('key', None) + self.secrets_type = 'AccountKey' + + class Compute(Model): """Machine Learning compute object. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AKS, AmlCompute, VirtualMachine, HDInsight, DataFactory, - Databricks, DataLakeAnalytics + sub-classes are: AKS, AmlCompute, ComputeInstance, VirtualMachine, + HDInsight, DataFactory, Databricks, DataLakeAnalytics Variables are only populated by the server, and will be ignored when sending a request. @@ -35,19 +151,22 @@ class Compute(Model): ~azure.mgmt.machinelearningservices.models.ProvisioningState :param description: The description of the Machine Learning compute. :type description: str - :ivar created_on: The date and time when the compute was created. + :ivar created_on: The time at which the compute was created. :vartype created_on: datetime - :ivar modified_on: The date and time when the compute was last modified. + :ivar modified_on: The time at which the compute was last modified. :vartype modified_on: datetime :param resource_id: ARM resource id of the underlying compute :type resource_id: str :ivar provisioning_errors: Errors during provisioning :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] + list[~azure.mgmt.machinelearningservices.models.ErrorResponse] :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought from outside if true, or machine learning service provisioned it if false. 
:vartype is_attached_compute: bool + :param disable_local_auth: Opt-out of local authentication and ensure + customers can use only MSI and AAD exclusively for authentication. + :type disable_local_auth: bool :param compute_type: Required. Constant filled by server. :type compute_type: str """ @@ -68,13 +187,14 @@ class Compute(Model): 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, 'compute_type': {'key': 'computeType', 'type': 'str'}, } _subtype_map = { - 'compute_type': {'AKS': 'AKS', 'AmlCompute': 'AmlCompute', 'VirtualMachine': 'VirtualMachine', 'HDInsight': 'HDInsight', 'DataFactory': 'DataFactory', 'Databricks': 'Databricks', 'DataLakeAnalytics': 'DataLakeAnalytics'} + 'compute_type': {'AKS': 'AKS', 'AmlCompute': 'AmlCompute', 'ComputeInstance': 'ComputeInstance', 'VirtualMachine': 'VirtualMachine', 'HDInsight': 'HDInsight', 'DataFactory': 'DataFactory', 'Databricks': 'Databricks', 'DataLakeAnalytics': 'DataLakeAnalytics'} } def __init__(self, **kwargs): @@ -87,6 +207,7 @@ def __init__(self, **kwargs): self.resource_id = kwargs.get('resource_id', None) self.provisioning_errors = None self.is_attached_compute = None + self.disable_local_auth = kwargs.get('disable_local_auth', None) self.compute_type = None @@ -108,19 +229,22 @@ class AKS(Compute): ~azure.mgmt.machinelearningservices.models.ProvisioningState :param description: The description of the Machine Learning compute. :type description: str - :ivar created_on: The date and time when the compute was created. + :ivar created_on: The time at which the compute was created. 
:vartype created_on: datetime - :ivar modified_on: The date and time when the compute was last modified. + :ivar modified_on: The time at which the compute was last modified. :vartype modified_on: datetime :param resource_id: ARM resource id of the underlying compute :type resource_id: str :ivar provisioning_errors: Errors during provisioning :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] + list[~azure.mgmt.machinelearningservices.models.ErrorResponse] :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought from outside if true, or machine learning service provisioned it if false. :vartype is_attached_compute: bool + :param disable_local_auth: Opt-out of local authentication and ensure + customers can use only MSI and AAD exclusively for authentication. + :type disable_local_auth: bool :param compute_type: Required. Constant filled by server. :type compute_type: str :param properties: AKS properties @@ -143,8 +267,9 @@ class AKS(Compute): 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, 'compute_type': {'key': 'computeType', 'type': 'str'}, 'properties': {'key': 'properties', 'type': 'AKSProperties'}, } @@ -277,26 +402,39 @@ class AKSProperties(Model): :type agent_count: int :param agent_vm_size: Agent virtual machine size :type agent_vm_size: str + :param cluster_purpose: Intended usage of the cluster. Possible values + include: 'FastProd', 'DenseProd', 'DevTest'. Default value: "FastProd" . 
+ :type cluster_purpose: str or + ~azure.mgmt.machinelearningservices.models.ClusterPurpose :param ssl_configuration: SSL configuration :type ssl_configuration: ~azure.mgmt.machinelearningservices.models.SslConfiguration :param aks_networking_configuration: AKS networking configuration for vnet :type aks_networking_configuration: ~azure.mgmt.machinelearningservices.models.AksNetworkingConfiguration + :param load_balancer_type: Load Balancer Type. Possible values include: + 'PublicIp', 'InternalLoadBalancer'. Default value: "PublicIp" . + :type load_balancer_type: str or + ~azure.mgmt.machinelearningservices.models.LoadBalancerType + :param load_balancer_subnet: Load Balancer Subnet + :type load_balancer_subnet: str """ _validation = { 'system_services': {'readonly': True}, - 'agent_count': {'minimum': 1}, + 'agent_count': {'minimum': 0}, } _attribute_map = { 'cluster_fqdn': {'key': 'clusterFqdn', 'type': 'str'}, 'system_services': {'key': 'systemServices', 'type': '[SystemService]'}, 'agent_count': {'key': 'agentCount', 'type': 'int'}, - 'agent_vm_size': {'key': 'agentVMSize', 'type': 'str'}, + 'agent_vm_size': {'key': 'agentVmSize', 'type': 'str'}, + 'cluster_purpose': {'key': 'clusterPurpose', 'type': 'str'}, 'ssl_configuration': {'key': 'sslConfiguration', 'type': 'SslConfiguration'}, 'aks_networking_configuration': {'key': 'aksNetworkingConfiguration', 'type': 'AksNetworkingConfiguration'}, + 'load_balancer_type': {'key': 'loadBalancerType', 'type': 'str'}, + 'load_balancer_subnet': {'key': 'loadBalancerSubnet', 'type': 'str'}, } def __init__(self, **kwargs): @@ -305,8 +443,11 @@ def __init__(self, **kwargs): self.system_services = None self.agent_count = kwargs.get('agent_count', None) self.agent_vm_size = kwargs.get('agent_vm_size', None) + self.cluster_purpose = kwargs.get('cluster_purpose', "FastProd") self.ssl_configuration = kwargs.get('ssl_configuration', None) self.aks_networking_configuration = kwargs.get('aks_networking_configuration', None) + 
self.load_balancer_type = kwargs.get('load_balancer_type', "PublicIp") + self.load_balancer_subnet = kwargs.get('load_balancer_subnet', None) class AmlCompute(Compute): @@ -327,19 +468,22 @@ class AmlCompute(Compute): ~azure.mgmt.machinelearningservices.models.ProvisioningState :param description: The description of the Machine Learning compute. :type description: str - :ivar created_on: The date and time when the compute was created. + :ivar created_on: The time at which the compute was created. :vartype created_on: datetime - :ivar modified_on: The date and time when the compute was last modified. + :ivar modified_on: The time at which the compute was last modified. :vartype modified_on: datetime :param resource_id: ARM resource id of the underlying compute :type resource_id: str :ivar provisioning_errors: Errors during provisioning :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] + list[~azure.mgmt.machinelearningservices.models.ErrorResponse] :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought from outside if true, or machine learning service provisioned it if false. :vartype is_attached_compute: bool + :param disable_local_auth: Opt-out of local authentication and ensure + customers can use only MSI and AAD exclusively for authentication. + :type disable_local_auth: bool :param compute_type: Required. Constant filled by server. 
:type compute_type: str :param properties: AML Compute properties @@ -363,8 +507,9 @@ class AmlCompute(Compute): 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, 'compute_type': {'key': 'computeType', 'type': 'str'}, 'properties': {'key': 'properties', 'type': 'AmlComputeProperties'}, } @@ -383,102 +528,50 @@ class AmlComputeNodeInformation(Model): :ivar node_id: Node ID. ID of the compute node. :vartype node_id: str - :ivar ip_address: IP address. Public IP address of the compute node. - :vartype ip_address: str + :ivar private_ip_address: Private IP address. Private IP address of the + compute node. + :vartype private_ip_address: str + :ivar public_ip_address: Public IP address. Public IP address of the + compute node. + :vartype public_ip_address: str :ivar port: Port. SSH port number of the node. :vartype port: float + :ivar node_state: State of the compute node. Values are idle, running, + preparing, unusable, leaving and preempted. Possible values include: + 'idle', 'running', 'preparing', 'unusable', 'leaving', 'preempted' + :vartype node_state: str or + ~azure.mgmt.machinelearningservices.models.NodeState + :ivar run_id: Run ID. ID of the Experiment running on the node, if any + else null. 
+ :vartype run_id: str """ _validation = { 'node_id': {'readonly': True}, - 'ip_address': {'readonly': True}, + 'private_ip_address': {'readonly': True}, + 'public_ip_address': {'readonly': True}, 'port': {'readonly': True}, + 'node_state': {'readonly': True}, + 'run_id': {'readonly': True}, } _attribute_map = { 'node_id': {'key': 'nodeId', 'type': 'str'}, - 'ip_address': {'key': 'ipAddress', 'type': 'str'}, + 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'}, + 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, 'port': {'key': 'port', 'type': 'float'}, + 'node_state': {'key': 'nodeState', 'type': 'str'}, + 'run_id': {'key': 'runId', 'type': 'str'}, } def __init__(self, **kwargs): super(AmlComputeNodeInformation, self).__init__(**kwargs) self.node_id = None - self.ip_address = None + self.private_ip_address = None + self.public_ip_address = None self.port = None - - -class ComputeNodesInformation(Model): - """Compute nodes information related to a Machine Learning compute. Might - differ for every type of compute. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmlComputeNodesInformation - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar next_link: The continuation token. - :vartype next_link: str - :param compute_type: Required. Constant filled by server. 
- :type compute_type: str - """ - - _validation = { - 'next_link': {'readonly': True}, - 'compute_type': {'required': True}, - } - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - } - - _subtype_map = { - 'compute_type': {'AmlCompute': 'AmlComputeNodesInformation'} - } - - def __init__(self, **kwargs): - super(ComputeNodesInformation, self).__init__(**kwargs) - self.next_link = None - self.compute_type = None - - -class AmlComputeNodesInformation(ComputeNodesInformation): - """Compute node information related to a AmlCompute. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar next_link: The continuation token. - :vartype next_link: str - :param compute_type: Required. Constant filled by server. - :type compute_type: str - :ivar nodes: The collection of returned AmlCompute nodes details. - :vartype nodes: - list[~azure.mgmt.machinelearningservices.models.AmlComputeNodeInformation] - """ - - _validation = { - 'next_link': {'readonly': True}, - 'compute_type': {'required': True}, - 'nodes': {'readonly': True}, - } - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'nodes': {'key': 'nodes', 'type': '[AmlComputeNodeInformation]'}, - } - - def __init__(self, **kwargs): - super(AmlComputeNodesInformation, self).__init__(**kwargs) - self.nodes = None - self.compute_type = 'AmlCompute' + self.node_state = None + self.run_id = None class AmlComputeProperties(Model): @@ -487,12 +580,21 @@ class AmlComputeProperties(Model): Variables are only populated by the server, and will be ignored when sending a request. + :param os_type: Compute OS Type. Possible values include: 'Linux', + 'Windows'. Default value: "Linux" . 
+ :type os_type: str or ~azure.mgmt.machinelearningservices.models.OsType :param vm_size: Virtual Machine Size :type vm_size: str :param vm_priority: Virtual Machine priority. Possible values include: 'Dedicated', 'LowPriority' :type vm_priority: str or ~azure.mgmt.machinelearningservices.models.VmPriority + :param virtual_machine_image: Virtual Machine image for AML Compute - + windows only + :type virtual_machine_image: + ~azure.mgmt.machinelearningservices.models.VirtualMachineImage + :param isolated_network: Network is isolated or not + :type isolated_network: bool :param scale_settings: Scale settings for AML Compute :type scale_settings: ~azure.mgmt.machinelearningservices.models.ScaleSettings @@ -503,6 +605,17 @@ class AmlComputeProperties(Model): :param subnet: Subnet. Virtual network subnet resource ID the compute nodes belong to. :type subnet: ~azure.mgmt.machinelearningservices.models.ResourceId + :param remote_login_port_public_access: Close remote Login Access Port. + State of the public SSH port. Possible values are: Disabled - Indicates + that the public ssh port is closed on all nodes of the cluster. Enabled - + Indicates that the public ssh port is open on all nodes of the cluster. + NotSpecified - Indicates that the public ssh port is closed on all nodes + of the cluster if VNet is defined, else is open all public nodes. It can + be default only during cluster creation time, after creation it will be + either enabled or disabled. Possible values include: 'Enabled', + 'Disabled', 'NotSpecified'. Default value: "NotSpecified" . + :type remote_login_port_public_access: str or + ~azure.mgmt.machinelearningservices.models.RemoteLoginPortPublicAccess :ivar allocation_state: Allocation state. Allocation state of the compute. Possible values are: steady - Indicates that the compute is not resizing. There are no changes to the number of compute nodes in the compute in @@ -519,7 +632,7 @@ class AmlComputeProperties(Model): :ivar errors: Errors. 
Collection of errors encountered by various compute nodes during node setup. :vartype errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] + list[~azure.mgmt.machinelearningservices.models.ErrorResponse] :ivar current_node_count: Current node count. The number of compute nodes currently assigned to the compute. :vartype current_node_count: int @@ -533,6 +646,12 @@ class AmlComputeProperties(Model): on the compute. :vartype node_state_counts: ~azure.mgmt.machinelearningservices.models.NodeStateCounts + :param enable_node_public_ip: Enable node public IP. Enable or disable + node public IP address provisioning. Possible values are: Possible values + are: true - Indicates that the compute nodes will have public IPs + provisioned. false - Indicates that the compute nodes will have a private + endpoint and no public IPs. Default value: True . + :type enable_node_public_ip: bool """ _validation = { @@ -545,791 +664,6495 @@ class AmlComputeProperties(Model): } _attribute_map = { + 'os_type': {'key': 'osType', 'type': 'str'}, 'vm_size': {'key': 'vmSize', 'type': 'str'}, 'vm_priority': {'key': 'vmPriority', 'type': 'str'}, + 'virtual_machine_image': {'key': 'virtualMachineImage', 'type': 'VirtualMachineImage'}, + 'isolated_network': {'key': 'isolatedNetwork', 'type': 'bool'}, 'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'}, 'user_account_credentials': {'key': 'userAccountCredentials', 'type': 'UserAccountCredentials'}, 'subnet': {'key': 'subnet', 'type': 'ResourceId'}, + 'remote_login_port_public_access': {'key': 'remoteLoginPortPublicAccess', 'type': 'str'}, 'allocation_state': {'key': 'allocationState', 'type': 'str'}, 'allocation_state_transition_time': {'key': 'allocationStateTransitionTime', 'type': 'iso-8601'}, - 'errors': {'key': 'errors', 'type': '[MachineLearningServiceError]'}, + 'errors': {'key': 'errors', 'type': '[ErrorResponse]'}, 'current_node_count': {'key': 'currentNodeCount', 'type': 'int'}, 
'target_node_count': {'key': 'targetNodeCount', 'type': 'int'}, 'node_state_counts': {'key': 'nodeStateCounts', 'type': 'NodeStateCounts'}, + 'enable_node_public_ip': {'key': 'enableNodePublicIp', 'type': 'bool'}, } def __init__(self, **kwargs): super(AmlComputeProperties, self).__init__(**kwargs) + self.os_type = kwargs.get('os_type', "Linux") self.vm_size = kwargs.get('vm_size', None) self.vm_priority = kwargs.get('vm_priority', None) + self.virtual_machine_image = kwargs.get('virtual_machine_image', None) + self.isolated_network = kwargs.get('isolated_network', None) self.scale_settings = kwargs.get('scale_settings', None) self.user_account_credentials = kwargs.get('user_account_credentials', None) self.subnet = kwargs.get('subnet', None) + self.remote_login_port_public_access = kwargs.get('remote_login_port_public_access', "NotSpecified") self.allocation_state = None self.allocation_state_transition_time = None self.errors = None self.current_node_count = None self.target_node_count = None self.node_state_counts = None + self.enable_node_public_ip = kwargs.get('enable_node_public_ip', True) -class CloudError(Model): - """CloudError. +class IdentityConfiguration(Model): + """Base definition for identity configuration. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AmlToken, ManagedIdentity + + All required parameters must be populated in order to send to Azure. + + :param identity_type: Required. Constant filled by server. + :type identity_type: str """ + _validation = { + 'identity_type': {'required': True}, + } + _attribute_map = { + 'identity_type': {'key': 'identityType', 'type': 'str'}, } + _subtype_map = { + 'identity_type': {'AMLToken': 'AmlToken', 'Managed': 'ManagedIdentity'} + } -class ClusterUpdateParameters(Model): - """AmlCompute update parameters. 
+ def __init__(self, **kwargs): + super(IdentityConfiguration, self).__init__(**kwargs) + self.identity_type = None - :param scale_settings: Scale settings. Desired scale settings for the - amlCompute. - :type scale_settings: - ~azure.mgmt.machinelearningservices.models.ScaleSettings + +class AmlToken(IdentityConfiguration): + """AML Token identity configuration. + + All required parameters must be populated in order to send to Azure. + + :param identity_type: Required. Constant filled by server. + :type identity_type: str """ + _validation = { + 'identity_type': {'required': True}, + } + _attribute_map = { - 'scale_settings': {'key': 'properties.scaleSettings', 'type': 'ScaleSettings'}, + 'identity_type': {'key': 'identityType', 'type': 'str'}, } def __init__(self, **kwargs): - super(ClusterUpdateParameters, self).__init__(**kwargs) - self.scale_settings = kwargs.get('scale_settings', None) + super(AmlToken, self).__init__(**kwargs) + self.identity_type = 'AMLToken' -class Resource(Model): - """Azure Resource Manager resource envelope. +class AmlUserFeature(Model): + """Features enabled for a workspace. - Variables are only populated by the server, and will be ignored when - sending a request. + :param id: Specifies the feature ID + :type id: str + :param display_name: Specifies the feature name + :type display_name: str + :param description: Describes the feature for user experience + :type description: str + """ - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar name: Specifies the name of the resource. - :vartype name: str - :ivar identity: The identity of the resource. - :vartype identity: ~azure.mgmt.machinelearningservices.models.Identity - :param location: Specifies the location of the resource. - :type location: str - :ivar type: Specifies the type of the resource. - :vartype type: str - :param tags: Contains resource tags defined as key/value pairs. 
- :type tags: dict[str, str] + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(AmlUserFeature, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.display_name = kwargs.get('display_name', None) + self.description = kwargs.get('description', None) + + +class AssetReferenceBase(Model): + """Base definition for asset references. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: DataPathAssetReference, IdAssetReference, + OutputPathAssetReference + + All required parameters must be populated in order to send to Azure. + + :param reference_type: Required. Constant filled by server. + :type reference_type: str """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'identity': {'readonly': True}, - 'type': {'readonly': True}, + 'reference_type': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'Identity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, + 'reference_type': {'key': 'referenceType', 'type': 'str'}, + } + + _subtype_map = { + 'reference_type': {'DataPath': 'DataPathAssetReference', 'Id': 'IdAssetReference', 'OutputPath': 'OutputPathAssetReference'} } def __init__(self, **kwargs): - super(Resource, self).__init__(**kwargs) - self.id = None - self.name = None - self.identity = None - self.location = kwargs.get('location', None) - self.type = None - self.tags = kwargs.get('tags', None) + super(AssetReferenceBase, self).__init__(**kwargs) + self.reference_type = None -class ComputeResource(Resource): - """Machine Learning compute object wrapped into ARM resource envelope. 
+class AssignedUser(Model): + """A user that can be assigned to a compute instance. - Variables are only populated by the server, and will be ignored when - sending a request. + All required parameters must be populated in order to send to Azure. - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar name: Specifies the name of the resource. - :vartype name: str - :ivar identity: The identity of the resource. - :vartype identity: ~azure.mgmt.machinelearningservices.models.Identity - :param location: Specifies the location of the resource. - :type location: str - :ivar type: Specifies the type of the resource. - :vartype type: str - :param tags: Contains resource tags defined as key/value pairs. - :type tags: dict[str, str] - :param properties: Compute properties - :type properties: ~azure.mgmt.machinelearningservices.models.Compute + :param object_id: Required. User’s AAD Object Id. + :type object_id: str + :param tenant_id: Required. User’s AAD Tenant Id. + :type tenant_id: str """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'identity': {'readonly': True}, - 'type': {'readonly': True}, + 'object_id': {'required': True}, + 'tenant_id': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'Identity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'properties': {'key': 'properties', 'type': 'Compute'}, + 'object_id': {'key': 'objectId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, } def __init__(self, **kwargs): - super(ComputeResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) + super(AssignedUser, self).__init__(**kwargs) + self.object_id = kwargs.get('object_id', None) + self.tenant_id = kwargs.get('tenant_id', None) -class Databricks(Compute): - """A DataFactory 
class AutoPauseProperties(Model):
    """Auto pause properties.

    :param delay_in_minutes:
    :type delay_in_minutes: int
    :param enabled:
    :type enabled: bool
    """

    _attribute_map = {
        'delay_in_minutes': {'key': 'delayInMinutes', 'type': 'int'},
        'enabled': {'key': 'enabled', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        super(AutoPauseProperties, self).__init__(**kwargs)
        self.delay_in_minutes = kwargs.get('delay_in_minutes', None)
        self.enabled = kwargs.get('enabled', None)


class AutoScaleProperties(Model):
    """Auto scale properties.

    :param min_node_count:
    :type min_node_count: int
    :param enabled:
    :type enabled: bool
    :param max_node_count:
    :type max_node_count: int
    """

    _attribute_map = {
        'min_node_count': {'key': 'minNodeCount', 'type': 'int'},
        'enabled': {'key': 'enabled', 'type': 'bool'},
        'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
    }

    def __init__(self, **kwargs):
        super(AutoScaleProperties, self).__init__(**kwargs)
        self.min_node_count = kwargs.get('min_node_count', None)
        self.enabled = kwargs.get('enabled', None)
        self.max_node_count = kwargs.get('max_node_count', None)


class OnlineScaleSettings(Model):
    """Online deployment scaling configuration.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: AutoScaleSettings, ManualScaleSettings

    All required parameters must be populated in order to send to Azure.

    :param max_instances: Maximum number of instances for this deployment.
    :type max_instances: int
    :param min_instances: Minimum number of instances for this deployment.
    :type min_instances: int
    :param scale_type: Required. Constant filled by server.
    :type scale_type: str
    """

    _validation = {
        'scale_type': {'required': True},
    }

    _attribute_map = {
        'max_instances': {'key': 'maxInstances', 'type': 'int'},
        'min_instances': {'key': 'minInstances', 'type': 'int'},
        'scale_type': {'key': 'scaleType', 'type': 'str'},
    }

    _subtype_map = {
        'scale_type': {'Auto': 'AutoScaleSettings', 'Manual': 'ManualScaleSettings'}
    }

    def __init__(self, **kwargs):
        super(OnlineScaleSettings, self).__init__(**kwargs)
        self.max_instances = kwargs.get('max_instances', None)
        self.min_instances = kwargs.get('min_instances', None)
        # Polymorphic discriminator; filled in by the concrete sub-class.
        self.scale_type = None


class AutoScaleSettings(OnlineScaleSettings):
    """AutoScaleSettings.

    All required parameters must be populated in order to send to Azure.

    :param max_instances: Maximum number of instances for this deployment.
    :type max_instances: int
    :param min_instances: Minimum number of instances for this deployment.
    :type min_instances: int
    :param scale_type: Required. Constant filled by server.
    :type scale_type: str
    :param polling_interval: The polling interval in ISO 8601 format. Only
     supports duration with precision as low as Seconds.
    :type polling_interval: timedelta
    :param target_utilization_percentage: Target CPU usage for the autoscaler.
    :type target_utilization_percentage: int
    """

    _validation = {
        'scale_type': {'required': True},
    }

    _attribute_map = {
        'max_instances': {'key': 'maxInstances', 'type': 'int'},
        'min_instances': {'key': 'minInstances', 'type': 'int'},
        'scale_type': {'key': 'scaleType', 'type': 'str'},
        'polling_interval': {'key': 'pollingInterval', 'type': 'duration'},
        'target_utilization_percentage': {'key': 'targetUtilizationPercentage', 'type': 'int'},
    }

    def __init__(self, **kwargs):
        super(AutoScaleSettings, self).__init__(**kwargs)
        self.polling_interval = kwargs.get('polling_interval', None)
        self.target_utilization_percentage = kwargs.get('target_utilization_percentage', None)
        self.scale_type = 'Auto'


class DatastoreContents(Model):
    """Base definition for datastore contents configuration.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: AzureBlobContents, AzureDataLakeGen1Contents,
    AzureDataLakeGen2Contents, AzureFileContents, AzurePostgreSqlContents,
    AzureSqlDatabaseContents, GlusterFsContents

    All required parameters must be populated in order to send to Azure.

    :param contents_type: Required. Constant filled by server.
    :type contents_type: str
    """

    _validation = {
        'contents_type': {'required': True},
    }

    _attribute_map = {
        'contents_type': {'key': 'contentsType', 'type': 'str'},
    }

    _subtype_map = {
        'contents_type': {'AzureBlob': 'AzureBlobContents', 'AzureDataLakeGen1': 'AzureDataLakeGen1Contents', 'AzureDataLakeGen2': 'AzureDataLakeGen2Contents', 'AzureFile': 'AzureFileContents', 'AzurePostgreSql': 'AzurePostgreSqlContents', 'AzureSqlDatabase': 'AzureSqlDatabaseContents', 'GlusterFs': 'GlusterFsContents'}
    }

    def __init__(self, **kwargs):
        super(DatastoreContents, self).__init__(**kwargs)
        # Polymorphic discriminator; filled in by the concrete sub-class.
        self.contents_type = None


class AzureBlobContents(DatastoreContents):
    """Azure Blob datastore configuration.

    All required parameters must be populated in order to send to Azure.

    :param contents_type: Required. Constant filled by server.
    :type contents_type: str
    :param account_name: Required. Storage account name.
    :type account_name: str
    :param container_name: Required. Storage account container name.
    :type container_name: str
    :param credentials: Required. Account credentials.
    :type credentials:
     ~azure.mgmt.machinelearningservices.models.DatastoreCredentials
    :param endpoint: Required. Azure cloud endpoint for the storage account.
    :type endpoint: str
    :param protocol: Required. Protocol used to communicate with the storage
     account.
    :type protocol: str
    """

    _validation = {
        'contents_type': {'required': True},
        'account_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'container_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'credentials': {'required': True},
        'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'protocol': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'contents_type': {'key': 'contentsType', 'type': 'str'},
        'account_name': {'key': 'accountName', 'type': 'str'},
        'container_name': {'key': 'containerName', 'type': 'str'},
        'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
        'endpoint': {'key': 'endpoint', 'type': 'str'},
        'protocol': {'key': 'protocol', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(AzureBlobContents, self).__init__(**kwargs)
        self.account_name = kwargs.get('account_name', None)
        self.container_name = kwargs.get('container_name', None)
        self.credentials = kwargs.get('credentials', None)
        self.endpoint = kwargs.get('endpoint', None)
        self.protocol = kwargs.get('protocol', None)
        self.contents_type = 'AzureBlob'


class AzureDataLakeGen1Contents(DatastoreContents):
    """Azure Data Lake Gen1 datastore configuration.

    All required parameters must be populated in order to send to Azure.

    :param contents_type: Required. Constant filled by server.
    :type contents_type: str
    :param credentials: Required. Account credentials.
    :type credentials:
     ~azure.mgmt.machinelearningservices.models.DatastoreCredentials
    :param store_name: Required. Azure Data Lake store name.
    :type store_name: str
    """

    _validation = {
        'contents_type': {'required': True},
        'credentials': {'required': True},
        'store_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'contents_type': {'key': 'contentsType', 'type': 'str'},
        'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
        'store_name': {'key': 'storeName', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(AzureDataLakeGen1Contents, self).__init__(**kwargs)
        self.credentials = kwargs.get('credentials', None)
        self.store_name = kwargs.get('store_name', None)
        self.contents_type = 'AzureDataLakeGen1'


class AzureDataLakeGen2Contents(DatastoreContents):
    """Azure Data Lake Gen2 datastore configuration.

    All required parameters must be populated in order to send to Azure.

    :param contents_type: Required. Constant filled by server.
    :type contents_type: str
    :param account_name: Required. Storage account name.
    :type account_name: str
    :param container_name: Required. Storage account container name.
    :type container_name: str
    :param credentials: Required. Account credentials.
    :type credentials:
     ~azure.mgmt.machinelearningservices.models.DatastoreCredentials
    :param endpoint: Required. Azure cloud endpoint for the storage account.
    :type endpoint: str
    :param protocol: Required. Protocol used to communicate with the storage
     account.
    :type protocol: str
    """

    _validation = {
        'contents_type': {'required': True},
        'account_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'container_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'credentials': {'required': True},
        'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'protocol': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'contents_type': {'key': 'contentsType', 'type': 'str'},
        'account_name': {'key': 'accountName', 'type': 'str'},
        'container_name': {'key': 'containerName', 'type': 'str'},
        'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
        'endpoint': {'key': 'endpoint', 'type': 'str'},
        'protocol': {'key': 'protocol', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(AzureDataLakeGen2Contents, self).__init__(**kwargs)
        self.account_name = kwargs.get('account_name', None)
        self.container_name = kwargs.get('container_name', None)
        self.credentials = kwargs.get('credentials', None)
        self.endpoint = kwargs.get('endpoint', None)
        self.protocol = kwargs.get('protocol', None)
        self.contents_type = 'AzureDataLakeGen2'


class Resource(Model):
    """Resource.

    Common fields that are returned in the response for all Azure Resource
    Manager resources.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
    :vartype id: str
    :ivar name: The name of the resource
    :vartype name: str
    :ivar type: The type of the resource. E.g.
     "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
    :vartype type: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(Resource, self).__init__(**kwargs)
        self.id = None
        self.name = None
        self.type = None


class AzureEntityResource(Resource):
    """Entity Resource.

    The resource model definition for an Azure Resource Manager resource with
    an etag.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
    :vartype id: str
    :ivar name: The name of the resource
    :vartype name: str
    :ivar type: The type of the resource. E.g.
     "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
    :vartype type: str
    :ivar etag: Resource Etag.
    :vartype etag: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'etag': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(AzureEntityResource, self).__init__(**kwargs)
        self.etag = None


class AzureFileContents(DatastoreContents):
    """Azure File datastore configuration.

    All required parameters must be populated in order to send to Azure.

    :param contents_type: Required. Constant filled by server.
    :type contents_type: str
    :param account_name: Required. Storage account name.
    :type account_name: str
    :param container_name: Required. Storage account container name.
    :type container_name: str
    :param credentials: Required. Account credentials.
    :type credentials:
     ~azure.mgmt.machinelearningservices.models.DatastoreCredentials
    :param endpoint: Required. Azure cloud endpoint for the storage account.
    :type endpoint: str
    :param protocol: Required. Protocol used to communicate with the storage
     account.
    :type protocol: str
    """

    _validation = {
        'contents_type': {'required': True},
        'account_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'container_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'credentials': {'required': True},
        'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'protocol': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'contents_type': {'key': 'contentsType', 'type': 'str'},
        'account_name': {'key': 'accountName', 'type': 'str'},
        'container_name': {'key': 'containerName', 'type': 'str'},
        'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
        'endpoint': {'key': 'endpoint', 'type': 'str'},
        'protocol': {'key': 'protocol', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(AzureFileContents, self).__init__(**kwargs)
        self.account_name = kwargs.get('account_name', None)
        self.container_name = kwargs.get('container_name', None)
        self.credentials = kwargs.get('credentials', None)
        self.endpoint = kwargs.get('endpoint', None)
        self.protocol = kwargs.get('protocol', None)
        self.contents_type = 'AzureFile'


class AzurePostgreSqlContents(DatastoreContents):
    """Azure Postgre SQL datastore configuration.

    All required parameters must be populated in order to send to Azure.

    :param contents_type: Required. Constant filled by server.
    :type contents_type: str
    :param credentials: Required. Account credentials.
    :type credentials:
     ~azure.mgmt.machinelearningservices.models.DatastoreCredentials
    :param database_name: Required. Azure SQL database name.
    :type database_name: str
    :param enable_ssl: Whether the Azure PostgreSQL server requires SSL.
    :type enable_ssl: bool
    :param endpoint: Required. Azure cloud endpoint for the database.
    :type endpoint: str
    :param port_number: Required. Azure SQL server port.
    :type port_number: int
    :param server_name: Required. Azure SQL server name.
    :type server_name: str
    """

    _validation = {
        'contents_type': {'required': True},
        'credentials': {'required': True},
        'database_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'port_number': {'required': True},
        'server_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'contents_type': {'key': 'contentsType', 'type': 'str'},
        'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
        'database_name': {'key': 'databaseName', 'type': 'str'},
        'enable_ssl': {'key': 'enableSSL', 'type': 'bool'},
        'endpoint': {'key': 'endpoint', 'type': 'str'},
        'port_number': {'key': 'portNumber', 'type': 'int'},
        'server_name': {'key': 'serverName', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(AzurePostgreSqlContents, self).__init__(**kwargs)
        self.credentials = kwargs.get('credentials', None)
        self.database_name = kwargs.get('database_name', None)
        self.enable_ssl = kwargs.get('enable_ssl', None)
        self.endpoint = kwargs.get('endpoint', None)
        self.port_number = kwargs.get('port_number', None)
        self.server_name = kwargs.get('server_name', None)
        self.contents_type = 'AzurePostgreSql'


class AzureSqlDatabaseContents(DatastoreContents):
    """Azure SQL Database datastore configuration.

    All required parameters must be populated in order to send to Azure.

    :param contents_type: Required. Constant filled by server.
    :type contents_type: str
    :param credentials: Required. Account credentials.
    :type credentials:
     ~azure.mgmt.machinelearningservices.models.DatastoreCredentials
    :param database_name: Required. Azure SQL database name.
    :type database_name: str
    :param endpoint: Required. Azure cloud endpoint for the database.
    :type endpoint: str
    :param port_number: Required. Azure SQL server port.
    :type port_number: int
    :param server_name: Required. Azure SQL server name.
    :type server_name: str
    """

    _validation = {
        'contents_type': {'required': True},
        'credentials': {'required': True},
        'database_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'port_number': {'required': True},
        'server_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'contents_type': {'key': 'contentsType', 'type': 'str'},
        'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'},
        'database_name': {'key': 'databaseName', 'type': 'str'},
        'endpoint': {'key': 'endpoint', 'type': 'str'},
        'port_number': {'key': 'portNumber', 'type': 'int'},
        'server_name': {'key': 'serverName', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(AzureSqlDatabaseContents, self).__init__(**kwargs)
        self.credentials = kwargs.get('credentials', None)
        self.database_name = kwargs.get('database_name', None)
        self.endpoint = kwargs.get('endpoint', None)
        self.port_number = kwargs.get('port_number', None)
        self.server_name = kwargs.get('server_name', None)
        self.contents_type = 'AzureSqlDatabase'


class EarlyTerminationPolicy(Model):
    """Early termination policies enable canceling poor-performing runs before
    they complete.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: BanditPolicy, MedianStoppingPolicy,
    TruncationSelectionPolicy

    All required parameters must be populated in order to send to Azure.

    :param delay_evaluation: Number of intervals by which to delay the first
     evaluation.
    :type delay_evaluation: int
    :param evaluation_interval: Interval (number of runs) between policy
     evaluations.
    :type evaluation_interval: int
    :param policy_type: Required. Constant filled by server.
    :type policy_type: str
    """

    _validation = {
        'policy_type': {'required': True},
    }

    _attribute_map = {
        'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'},
        'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'},
        'policy_type': {'key': 'policyType', 'type': 'str'},
    }

    _subtype_map = {
        'policy_type': {'Bandit': 'BanditPolicy', 'MedianStopping': 'MedianStoppingPolicy', 'TruncationSelection': 'TruncationSelectionPolicy'}
    }

    def __init__(self, **kwargs):
        super(EarlyTerminationPolicy, self).__init__(**kwargs)
        self.delay_evaluation = kwargs.get('delay_evaluation', None)
        self.evaluation_interval = kwargs.get('evaluation_interval', None)
        # Polymorphic discriminator; filled in by the concrete sub-class.
        self.policy_type = None


class BanditPolicy(EarlyTerminationPolicy):
    """Defines an early termination policy based on slack criteria, and a
    frequency and delay interval for evaluation.

    All required parameters must be populated in order to send to Azure.

    :param delay_evaluation: Number of intervals by which to delay the first
     evaluation.
    :type delay_evaluation: int
    :param evaluation_interval: Interval (number of runs) between policy
     evaluations.
    :type evaluation_interval: int
    :param policy_type: Required. Constant filled by server.
    :type policy_type: str
    :param slack_amount: Absolute distance allowed from the best performing
     run.
    :type slack_amount: float
    :param slack_factor: Ratio of the allowed distance from the best
     performing run.
    :type slack_factor: float
    """

    _validation = {
        'policy_type': {'required': True},
    }

    _attribute_map = {
        'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'},
        'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'},
        'policy_type': {'key': 'policyType', 'type': 'str'},
        'slack_amount': {'key': 'slackAmount', 'type': 'float'},
        'slack_factor': {'key': 'slackFactor', 'type': 'float'},
    }

    def __init__(self, **kwargs):
        super(BanditPolicy, self).__init__(**kwargs)
        self.slack_amount = kwargs.get('slack_amount', None)
        self.slack_factor = kwargs.get('slack_factor', None)
        self.policy_type = 'Bandit'
+ :type error_threshold: int + :param logging_level: Logging level for batch inference operation. + Possible values include: 'Info', 'Warning', 'Debug' + :type logging_level: str or + ~azure.mgmt.machinelearningservices.models.BatchLoggingLevel + :param mini_batch_size: Size of the mini-batch passed to each batch + invocation. + For FileDataset, this is the number of files per mini-batch. + For TabularDataset, this is the size of the records in bytes, per + mini-batch. + :type mini_batch_size: long + :param model: Reference to the model asset for the endpoint deployment. + :type model: ~azure.mgmt.machinelearningservices.models.AssetReferenceBase + :param output_configuration: Output configuration for the batch inference + operation. + :type output_configuration: + ~azure.mgmt.machinelearningservices.models.BatchOutputConfiguration + :param partition_keys: Partition keys list used for Named partitioning. + :type partition_keys: list[str] + :param properties: Property dictionary. Properties can be added, but not + removed or altered. + :type properties: dict[str, str] + :param retry_settings: Retry Settings for the batch inference operation. + :type retry_settings: + ~azure.mgmt.machinelearningservices.models.BatchRetrySettings + """ -class NodeStateCounts(Model): - """Counts of various compute node states on the amlCompute. 
+ _attribute_map = { + 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, + 'compute': {'key': 'compute', 'type': 'ComputeConfiguration'}, + 'description': {'key': 'description', 'type': 'str'}, + 'environment_id': {'key': 'environmentId', 'type': 'str'}, + 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, + 'error_threshold': {'key': 'errorThreshold', 'type': 'int'}, + 'logging_level': {'key': 'loggingLevel', 'type': 'str'}, + 'mini_batch_size': {'key': 'miniBatchSize', 'type': 'long'}, + 'model': {'key': 'model', 'type': 'AssetReferenceBase'}, + 'output_configuration': {'key': 'outputConfiguration', 'type': 'BatchOutputConfiguration'}, + 'partition_keys': {'key': 'partitionKeys', 'type': '[str]'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'retry_settings': {'key': 'retrySettings', 'type': 'BatchRetrySettings'}, + } + + def __init__(self, **kwargs): + super(BatchDeployment, self).__init__(**kwargs) + self.code_configuration = kwargs.get('code_configuration', None) + self.compute = kwargs.get('compute', None) + self.description = kwargs.get('description', None) + self.environment_id = kwargs.get('environment_id', None) + self.environment_variables = kwargs.get('environment_variables', None) + self.error_threshold = kwargs.get('error_threshold', None) + self.logging_level = kwargs.get('logging_level', None) + self.mini_batch_size = kwargs.get('mini_batch_size', None) + self.model = kwargs.get('model', None) + self.output_configuration = kwargs.get('output_configuration', None) + self.partition_keys = kwargs.get('partition_keys', None) + self.properties = kwargs.get('properties', None) + self.retry_settings = kwargs.get('retry_settings', None) + + +class TrackedResource(Resource): + """Tracked Resource. + + The resource model definition for an Azure Resource Manager tracked top + level resource which has 'tags' and a 'location'. 
Variables are only populated by the server, and will be ignored when sending a request. - :ivar idle_node_count: Idle node count. Number of compute nodes in idle - state. - :vartype idle_node_count: int - :ivar running_node_count: Running node count. Number of compute nodes - which are running jobs. - :vartype running_node_count: int - :ivar preparing_node_count: Preparing node count. Number of compute nodes - which are being prepared. - :vartype preparing_node_count: int - :ivar unusable_node_count: Unusable node count. Number of compute nodes - which are in unusable state. - :vartype unusable_node_count: int - :ivar leaving_node_count: Leaving node count. Number of compute nodes - which are leaving the amlCompute. - :vartype leaving_node_count: int - :ivar preempted_node_count: Preempted node count. Number of compute nodes - which are in preempted state. - :vartype preempted_node_count: int + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param tags: Resource tags. + :type tags: dict[str, str] + :param location: Required. 
The geo-location where the resource lives + :type location: str """ _validation = { - 'idle_node_count': {'readonly': True}, - 'running_node_count': {'readonly': True}, - 'preparing_node_count': {'readonly': True}, - 'unusable_node_count': {'readonly': True}, - 'leaving_node_count': {'readonly': True}, - 'preempted_node_count': {'readonly': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, } _attribute_map = { - 'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'}, - 'running_node_count': {'key': 'runningNodeCount', 'type': 'int'}, - 'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'}, - 'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'}, - 'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'}, - 'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, } def __init__(self, **kwargs): - super(NodeStateCounts, self).__init__(**kwargs) - self.idle_node_count = None - self.running_node_count = None - self.preparing_node_count = None - self.unusable_node_count = None - self.leaving_node_count = None - self.preempted_node_count = None + super(TrackedResource, self).__init__(**kwargs) + self.tags = kwargs.get('tags', None) + self.location = kwargs.get('location', None) -class Operation(Model): - """Azure Machine Learning workspace REST API operation. +class BatchDeploymentTrackedResource(TrackedResource): + """BatchDeploymentTrackedResource. 
- :param name: Operation name: {provider}/{resource}/{operation} - :type name: str - :param display: Display name of operation - :type display: ~azure.mgmt.machinelearningservices.models.OperationDisplay + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param tags: Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives + :type location: str + :param identity: Service identity associated with a resource. + :type identity: + ~azure.mgmt.machinelearningservices.models.ResourceIdentity + :param kind: Metadata used by portal/tooling/etc to render different UX + experiences for resources of the same type. + :type kind: str + :param properties: Required. Additional attributes of the entity. 
+ :type properties: + ~azure.mgmt.machinelearningservices.models.BatchDeployment + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData """ + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, + } + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, - 'display': {'key': 'display', 'type': 'OperationDisplay'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'BatchDeployment'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, } def __init__(self, **kwargs): - super(Operation, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.display = kwargs.get('display', None) + super(BatchDeploymentTrackedResource, self).__init__(**kwargs) + self.identity = kwargs.get('identity', None) + self.kind = kwargs.get('kind', None) + self.properties = kwargs.get('properties', None) + self.system_data = None -class OperationDisplay(Model): - """Display name of operation. +class BatchEndpoint(Model): + """Batch endpoint configuration. - :param provider: The resource provider name: - Microsoft.MachineLearningExperimentation - :type provider: str - :param resource: The resource on which the operation is performed. - :type resource: str - :param operation: The operation that users can perform. - :type operation: str - :param description: The description for the operation. + Variables are only populated by the server, and will be ignored when + sending a request. + + :param auth_mode: Enum to determine endpoint authentication mode. 
Possible + values include: 'AMLToken', 'Key', 'AADToken' + :type auth_mode: str or + ~azure.mgmt.machinelearningservices.models.EndpointAuthMode + :param description: Description of the inference endpoint. :type description: str + :param keys: EndpointAuthKeys to set initially on an Endpoint. + This property will always be returned as null. AuthKey values must be + retrieved using the ListKeys API. + :type keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :param properties: Property dictionary. Properties can be added, but not + removed or altered. + :type properties: dict[str, str] + :ivar scoring_uri: Endpoint URI. + :vartype scoring_uri: str + :ivar swagger_uri: Endpoint Swagger URI. + :vartype swagger_uri: str + :param traffic: Traffic rules on how the traffic will be routed across + deployments. + :type traffic: dict[str, int] """ + _validation = { + 'scoring_uri': {'readonly': True}, + 'swagger_uri': {'readonly': True}, + } + _attribute_map = { - 'provider': {'key': 'provider', 'type': 'str'}, - 'resource': {'key': 'resource', 'type': 'str'}, - 'operation': {'key': 'operation', 'type': 'str'}, + 'auth_mode': {'key': 'authMode', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, + 'keys': {'key': 'keys', 'type': 'EndpointAuthKeys'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'scoring_uri': {'key': 'scoringUri', 'type': 'str'}, + 'swagger_uri': {'key': 'swaggerUri', 'type': 'str'}, + 'traffic': {'key': 'traffic', 'type': '{int}'}, } def __init__(self, **kwargs): - super(OperationDisplay, self).__init__(**kwargs) - self.provider = kwargs.get('provider', None) - self.resource = kwargs.get('resource', None) - self.operation = kwargs.get('operation', None) + super(BatchEndpoint, self).__init__(**kwargs) + self.auth_mode = kwargs.get('auth_mode', None) self.description = kwargs.get('description', None) + self.keys = kwargs.get('keys', None) + self.properties = kwargs.get('properties', None) + self.scoring_uri = 
None + self.swagger_uri = None + self.traffic = kwargs.get('traffic', None) -class Password(Model): - """Password. +class BatchEndpointTrackedResource(TrackedResource): + """BatchEndpointTrackedResource. Variables are only populated by the server, and will be ignored when sending a request. - :ivar name: + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource :vartype name: str - :ivar value: - :vartype value: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param tags: Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives + :type location: str + :param identity: Service identity associated with a resource. + :type identity: + ~azure.mgmt.machinelearningservices.models.ResourceIdentity + :param kind: Metadata used by portal/tooling/etc to render different UX + experiences for resources of the same type. + :type kind: str + :param properties: Required. Additional attributes of the entity. 
+ :type properties: ~azure.mgmt.machinelearningservices.models.BatchEndpoint + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData """ _validation = { + 'id': {'readonly': True}, 'name': {'readonly': True}, - 'value': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, } _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'BatchEndpoint'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, } def __init__(self, **kwargs): - super(Password, self).__init__(**kwargs) - self.name = None - self.value = None + super(BatchEndpointTrackedResource, self).__init__(**kwargs) + self.identity = kwargs.get('identity', None) + self.kind = kwargs.get('kind', None) + self.properties = kwargs.get('properties', None) + self.system_data = None + + +class BatchOutputConfiguration(Model): + """Batch inference output configuration. + + :param append_row_file_name: Customized output file name for append_row + output action. + :type append_row_file_name: str + :param output_action: Indicates how the output will be organized. 
Possible + values include: 'SummaryOnly', 'AppendRow' + :type output_action: str or + ~azure.mgmt.machinelearningservices.models.BatchOutputAction + """ + + _attribute_map = { + 'append_row_file_name': {'key': 'appendRowFileName', 'type': 'str'}, + 'output_action': {'key': 'outputAction', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(BatchOutputConfiguration, self).__init__(**kwargs) + self.append_row_file_name = kwargs.get('append_row_file_name', None) + self.output_action = kwargs.get('output_action', None) + + +class BatchRetrySettings(Model): + """Retry settings for a batch inference operation. + + :param max_retries: Maximum retry count for a mini-batch + :type max_retries: int + :param timeout: Invocation timeout for a mini-batch, in ISO 8601 format. + :type timeout: timedelta + """ + + _attribute_map = { + 'max_retries': {'key': 'maxRetries', 'type': 'int'}, + 'timeout': {'key': 'timeout', 'type': 'duration'}, + } + + def __init__(self, **kwargs): + super(BatchRetrySettings, self).__init__(**kwargs) + self.max_retries = kwargs.get('max_retries', None) + self.timeout = kwargs.get('timeout', None) + + +class CertificateDatastoreCredentials(DatastoreCredentials): + """Certificate datastore credentials configuration. + + All required parameters must be populated in order to send to Azure. + + :param credentials_type: Required. Constant filled by server. + :type credentials_type: str + :param authority_url: Authority URL used for authentication. + :type authority_url: str + :param client_id: Required. Service principal client ID. + :type client_id: str + :param resource_uri: Resource the service principal has access to. + :type resource_uri: str + :param secrets: Service principal secrets. + :type secrets: + ~azure.mgmt.machinelearningservices.models.CertificateDatastoreSecrets + :param tenant_id: Required. ID of the tenant to which the service + principal belongs. + :type tenant_id: str + :param thumbprint: Required. 
Thumbprint of the certificate used for + authentication. + :type thumbprint: str + """ + + _validation = { + 'credentials_type': {'required': True}, + 'client_id': {'required': True}, + 'tenant_id': {'required': True}, + 'thumbprint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, + 'authority_url': {'key': 'authorityUrl', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + 'resource_uri': {'key': 'resourceUri', 'type': 'str'}, + 'secrets': {'key': 'secrets', 'type': 'CertificateDatastoreSecrets'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'thumbprint': {'key': 'thumbprint', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(CertificateDatastoreCredentials, self).__init__(**kwargs) + self.authority_url = kwargs.get('authority_url', None) + self.client_id = kwargs.get('client_id', None) + self.resource_uri = kwargs.get('resource_uri', None) + self.secrets = kwargs.get('secrets', None) + self.tenant_id = kwargs.get('tenant_id', None) + self.thumbprint = kwargs.get('thumbprint', None) + self.credentials_type = 'Certificate' + + +class CertificateDatastoreSecrets(DatastoreSecrets): + """Datastore certificate secrets. + + All required parameters must be populated in order to send to Azure. + + :param secrets_type: Required. Constant filled by server. + :type secrets_type: str + :param certificate: Service principal certificate. + :type certificate: str + """ + + _validation = { + 'secrets_type': {'required': True}, + } + + _attribute_map = { + 'secrets_type': {'key': 'secretsType', 'type': 'str'}, + 'certificate': {'key': 'certificate', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(CertificateDatastoreSecrets, self).__init__(**kwargs) + self.certificate = kwargs.get('certificate', None) + self.secrets_type = 'Certificate' + + +class CloudError(Model): + """CloudError. 
+ """ + + _attribute_map = { + } + + +class ClusterUpdateParameters(Model): + """AmlCompute update parameters. + + :param scale_settings: Scale settings. Desired scale settings for the + amlCompute. + :type scale_settings: + ~azure.mgmt.machinelearningservices.models.ScaleSettings + """ + + _attribute_map = { + 'scale_settings': {'key': 'properties.scaleSettings', 'type': 'ScaleSettings'}, + } + + def __init__(self, **kwargs): + super(ClusterUpdateParameters, self).__init__(**kwargs) + self.scale_settings = kwargs.get('scale_settings', None) + + +class ExportSummary(Model): + """ExportSummary. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: CocoExportSummary, CsvExportSummary, DatasetExportSummary + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar end_time_utc: The time when the export was completed. + :vartype end_time_utc: datetime + :ivar exported_row_count: The total number of labeled datapoints exported. + :vartype exported_row_count: long + :ivar labeling_job_id: Name and identifier of the job containing exported + labels. + :vartype labeling_job_id: str + :ivar start_time_utc: The time when the export was requested. + :vartype start_time_utc: datetime + :param format: Required. Constant filled by server. 
+ :type format: str + """ + + _validation = { + 'end_time_utc': {'readonly': True}, + 'exported_row_count': {'readonly': True}, + 'labeling_job_id': {'readonly': True}, + 'start_time_utc': {'readonly': True}, + 'format': {'required': True}, + } + + _attribute_map = { + 'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'}, + 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'}, + 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'}, + 'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'}, + 'format': {'key': 'format', 'type': 'str'}, + } + + _subtype_map = { + 'format': {'Coco': 'CocoExportSummary', 'CSV': 'CsvExportSummary', 'Dataset': 'DatasetExportSummary'} + } + + def __init__(self, **kwargs): + super(ExportSummary, self).__init__(**kwargs) + self.end_time_utc = None + self.exported_row_count = None + self.labeling_job_id = None + self.start_time_utc = None + self.format = None + + +class CocoExportSummary(ExportSummary): + """CocoExportSummary. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar end_time_utc: The time when the export was completed. + :vartype end_time_utc: datetime + :ivar exported_row_count: The total number of labeled datapoints exported. + :vartype exported_row_count: long + :ivar labeling_job_id: Name and identifier of the job containing exported + labels. + :vartype labeling_job_id: str + :ivar start_time_utc: The time when the export was requested. + :vartype start_time_utc: datetime + :param format: Required. Constant filled by server. + :type format: str + :ivar container_name: The container name to which the labels will be + exported. + :vartype container_name: str + :ivar snapshot_path: The output path where the labels will be exported. 
+ :vartype snapshot_path: str + """ + + _validation = { + 'end_time_utc': {'readonly': True}, + 'exported_row_count': {'readonly': True}, + 'labeling_job_id': {'readonly': True}, + 'start_time_utc': {'readonly': True}, + 'format': {'required': True}, + 'container_name': {'readonly': True}, + 'snapshot_path': {'readonly': True}, + } + + _attribute_map = { + 'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'}, + 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'}, + 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'}, + 'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'}, + 'format': {'key': 'format', 'type': 'str'}, + 'container_name': {'key': 'containerName', 'type': 'str'}, + 'snapshot_path': {'key': 'snapshotPath', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(CocoExportSummary, self).__init__(**kwargs) + self.container_name = None + self.snapshot_path = None + self.format = 'Coco' + + +class CodeConfiguration(Model): + """Configuration for a scoring code asset. + + All required parameters must be populated in order to send to Azure. + + :param code_id: ARM resource ID of the code asset. + :type code_id: str + :param scoring_script: Required. The script to execute on startup. eg. + "score.py" + :type scoring_script: str + """ + + _validation = { + 'scoring_script': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'code_id': {'key': 'codeId', 'type': 'str'}, + 'scoring_script': {'key': 'scoringScript', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(CodeConfiguration, self).__init__(**kwargs) + self.code_id = kwargs.get('code_id', None) + self.scoring_script = kwargs.get('scoring_script', None) + + +class CodeContainer(Model): + """Container for code asset versions. + + :param description: The asset description text. + :type description: str + :param properties: The asset property dictionary. 
+ :type properties: dict[str, str] + :param tags: Tag dictionary. Tags can be added, removed, and updated. + :type tags: dict[str, str] + """ + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, **kwargs): + super(CodeContainer, self).__init__(**kwargs) + self.description = kwargs.get('description', None) + self.properties = kwargs.get('properties', None) + self.tags = kwargs.get('tags', None) + + +class CodeContainerResource(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param properties: Required. Additional attributes of the entity. 
+ :type properties: ~azure.mgmt.machinelearningservices.models.CodeContainer + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'CodeContainer'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, **kwargs): + super(CodeContainerResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + self.system_data = None + + +class CodeVersion(Model): + """Code asset version details. + + All required parameters must be populated in order to send to Azure. + + :param datastore_id: ARM resource ID of the datastore where the asset is + located. + :type datastore_id: str + :param description: The asset description text. + :type description: str + :param is_anonymous: If the name version are system generated (anonymous + registration). + :type is_anonymous: bool + :param path: Required. The path of the file/directory in the datastore. + :type path: str + :param properties: The asset property dictionary. + :type properties: dict[str, str] + :param tags: Tag dictionary. Tags can be added, removed, and updated. 
+ :type tags: dict[str, str] + """ + + _validation = { + 'path': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'datastore_id': {'key': 'datastoreId', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, + 'path': {'key': 'path', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, **kwargs): + super(CodeVersion, self).__init__(**kwargs) + self.datastore_id = kwargs.get('datastore_id', None) + self.description = kwargs.get('description', None) + self.is_anonymous = kwargs.get('is_anonymous', None) + self.path = kwargs.get('path', None) + self.properties = kwargs.get('properties', None) + self.tags = kwargs.get('tags', None) + + +class CodeVersionResource(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param properties: Required. Additional attributes of the entity. 
+ :type properties: ~azure.mgmt.machinelearningservices.models.CodeVersion + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'CodeVersion'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, **kwargs): + super(CodeVersionResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + self.system_data = None + + +class JobBase(Model): + """Base definition for a job. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: CommandJob, SweepJob + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param description: The asset description text. + :type description: str + :ivar interaction_endpoints: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of + FileStreamObject. + :vartype interaction_endpoints: dict[str, + ~azure.mgmt.machinelearningservices.models.JobEndpoint] + :param properties: The asset property dictionary. + :type properties: dict[str, str] + :ivar provisioning_state: Specifies the job provisioning state. Possible + values include: 'Succeeded', 'Failed', 'Canceled', 'InProgress' + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.JobProvisioningState + :param tags: Tag dictionary. Tags can be added, removed, and updated. + :type tags: dict[str, str] + :param job_type: Required. 
Constant filled by server. + :type job_type: str + """ + + _validation = { + 'interaction_endpoints': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'job_type': {'required': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'job_type': {'key': 'jobType', 'type': 'str'}, + } + + _subtype_map = { + 'job_type': {'Command': 'CommandJob', 'Sweep': 'SweepJob'} + } + + def __init__(self, **kwargs): + super(JobBase, self).__init__(**kwargs) + self.description = kwargs.get('description', None) + self.interaction_endpoints = None + self.properties = kwargs.get('properties', None) + self.provisioning_state = None + self.tags = kwargs.get('tags', None) + self.job_type = None + + +class CommandJob(JobBase): + """Command job definition. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param description: The asset description text. + :type description: str + :ivar interaction_endpoints: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of + FileStreamObject. + :vartype interaction_endpoints: dict[str, + ~azure.mgmt.machinelearningservices.models.JobEndpoint] + :param properties: The asset property dictionary. + :type properties: dict[str, str] + :ivar provisioning_state: Specifies the job provisioning state. Possible + values include: 'Succeeded', 'Failed', 'Canceled', 'InProgress' + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.JobProvisioningState + :param tags: Tag dictionary. Tags can be added, removed, and updated. + :type tags: dict[str, str] + :param job_type: Required. 
Constant filled by server. + :type job_type: str + :param code_id: ARM resource ID of the code asset. + :type code_id: str + :param command: Required. The command to execute on startup of the job. + eg. "python train.py" + :type command: str + :param compute: Required. Compute binding for the job. + :type compute: + ~azure.mgmt.machinelearningservices.models.ComputeConfiguration + :param distribution: Distribution configuration of the job. If set, this + should be one of Mpi, Tensorflow, PyTorch, or null. + :type distribution: + ~azure.mgmt.machinelearningservices.models.DistributionConfiguration + :param environment_id: The ARM resource ID of the Environment + specification for the job. + :type environment_id: str + :param environment_variables: Environment variables included in the job. + :type environment_variables: dict[str, str] + :param experiment_name: The name of the experiment the job belongs to. If + not set, the job is placed in the "Default" experiment. + :type experiment_name: str + :param identity: Identity configuration. If set, this should be one of + AmlToken, ManagedIdentity, or null. + Defaults to AmlToken if null. + :type identity: + ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :param input_data_bindings: Mapping of input data bindings used in the + job. + :type input_data_bindings: dict[str, + ~azure.mgmt.machinelearningservices.models.InputDataBinding] + :ivar output: Location of the job output logs and artifacts. + :vartype output: ~azure.mgmt.machinelearningservices.models.JobOutput + :param output_data_bindings: Mapping of output data bindings used in the + job. + :type output_data_bindings: dict[str, + ~azure.mgmt.machinelearningservices.models.OutputDataBinding] + :ivar parameters: Input parameters. + :vartype parameters: dict[str, object] + :param priority: Job priority for scheduling policy. Only applies to + AMLCompute. + Private preview feature and only available to users on the allow list. 
+ :type priority: int + :ivar status: Status of the job. Possible values include: 'NotStarted', + 'Starting', 'Provisioning', 'Preparing', 'Queued', 'Running', + 'Finalizing', 'CancelRequested', 'Completed', 'Failed', 'Canceled', + 'NotResponding', 'Paused', 'Unknown' + :vartype status: str or + ~azure.mgmt.machinelearningservices.models.JobStatus + :param timeout: The max run duration in ISO 8601 format, after which the + job will be cancelled. Only supports duration with precision as low as + Seconds. + :type timeout: timedelta + """ + + _validation = { + 'interaction_endpoints': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'job_type': {'required': True}, + 'command': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + 'compute': {'required': True}, + 'output': {'readonly': True}, + 'parameters': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'job_type': {'key': 'jobType', 'type': 'str'}, + 'code_id': {'key': 'codeId', 'type': 'str'}, + 'command': {'key': 'command', 'type': 'str'}, + 'compute': {'key': 'compute', 'type': 'ComputeConfiguration'}, + 'distribution': {'key': 'distribution', 'type': 'DistributionConfiguration'}, + 'environment_id': {'key': 'environmentId', 'type': 'str'}, + 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, + 'experiment_name': {'key': 'experimentName', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, + 'input_data_bindings': {'key': 'inputDataBindings', 'type': '{InputDataBinding}'}, + 'output': {'key': 'output', 'type': 'JobOutput'}, + 'output_data_bindings': {'key': 'outputDataBindings', 'type': 
'{OutputDataBinding}'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + 'priority': {'key': 'priority', 'type': 'int'}, + 'status': {'key': 'status', 'type': 'str'}, + 'timeout': {'key': 'timeout', 'type': 'duration'}, + } + + def __init__(self, **kwargs): + super(CommandJob, self).__init__(**kwargs) + self.code_id = kwargs.get('code_id', None) + self.command = kwargs.get('command', None) + self.compute = kwargs.get('compute', None) + self.distribution = kwargs.get('distribution', None) + self.environment_id = kwargs.get('environment_id', None) + self.environment_variables = kwargs.get('environment_variables', None) + self.experiment_name = kwargs.get('experiment_name', None) + self.identity = kwargs.get('identity', None) + self.input_data_bindings = kwargs.get('input_data_bindings', None) + self.output = None + self.output_data_bindings = kwargs.get('output_data_bindings', None) + self.parameters = None + self.priority = kwargs.get('priority', None) + self.status = None + self.timeout = kwargs.get('timeout', None) + self.job_type = 'Command' + + +class ComputeConfiguration(Model): + """Configuration for compute binding. + + :param instance_count: Number of instances or nodes. + :type instance_count: int + :param instance_type: SKU type to run on. + :type instance_type: str + :param is_local: Set to true for jobs running on local compute. + :type is_local: bool + :param location: Location for virtual cluster run. + :type location: str + :param properties: Additional properties. + :type properties: dict[str, str] + :param target: ARM resource ID of the compute resource. 
+ :type target: str + """ + + _attribute_map = { + 'instance_count': {'key': 'instanceCount', 'type': 'int'}, + 'instance_type': {'key': 'instanceType', 'type': 'str'}, + 'is_local': {'key': 'isLocal', 'type': 'bool'}, + 'location': {'key': 'location', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'target': {'key': 'target', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ComputeConfiguration, self).__init__(**kwargs) + self.instance_count = kwargs.get('instance_count', None) + self.instance_type = kwargs.get('instance_type', None) + self.is_local = kwargs.get('is_local', None) + self.location = kwargs.get('location', None) + self.properties = kwargs.get('properties', None) + self.target = kwargs.get('target', None) + + +class ComputeInstance(Compute): + """An Azure Machine Learning compute instance. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_location: Location for the underlying compute + :type compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values + are Unknown, Updating, Provisioning, Succeeded, and Failed. Possible + values include: 'Unknown', 'Updating', 'Creating', 'Deleting', + 'Succeeded', 'Failed', 'Canceled' + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ProvisioningState + :param description: The description of the Machine Learning compute. + :type description: str + :ivar created_on: The time at which the compute was created. + :vartype created_on: datetime + :ivar modified_on: The time at which the compute was last modified. 
+ :vartype modified_on: datetime + :param resource_id: ARM resource id of the underlying compute + :type resource_id: str + :ivar provisioning_errors: Errors during provisioning + :vartype provisioning_errors: + list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar is_attached_compute: Indicating whether the compute was provisioned + by user and brought from outside if true, or machine learning service + provisioned it if false. + :vartype is_attached_compute: bool + :param disable_local_auth: Opt-out of local authentication and ensure + customers can use only MSI and AAD exclusively for authentication. + :type disable_local_auth: bool + :param compute_type: Required. Constant filled by server. + :type compute_type: str + :param properties: Compute Instance properties + :type properties: + ~azure.mgmt.machinelearningservices.models.ComputeInstanceProperties + """ + + _validation = { + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + 'compute_type': {'required': True}, + } + + _attribute_map = { + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'ComputeInstanceProperties'}, + } + + def __init__(self, **kwargs): + super(ComputeInstance, self).__init__(**kwargs) + 
self.properties = kwargs.get('properties', None)
+ self.compute_type = 'ComputeInstance'
+
+
+class ComputeInstanceApplication(Model):
+ """Defines an Aml Instance application and its connectivity endpoint URI.
+
+ :param display_name: Name of the ComputeInstance application.
+ :type display_name: str
+ :param endpoint_uri: Application's endpoint URI.
+ :type endpoint_uri: str
+ """
+
+ _attribute_map = {
+ 'display_name': {'key': 'displayName', 'type': 'str'},
+ 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ComputeInstanceApplication, self).__init__(**kwargs)
+ self.display_name = kwargs.get('display_name', None)
+ self.endpoint_uri = kwargs.get('endpoint_uri', None)
+
+
+class ComputeInstanceConnectivityEndpoints(Model):
+ """Defines all connectivity endpoints and properties for a ComputeInstance.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ :ivar public_ip_address: Public IP Address of this ComputeInstance.
+ :vartype public_ip_address: str
+ :ivar private_ip_address: Private IP Address of this ComputeInstance
+ (local to the VNET in which the compute instance is deployed).
+ :vartype private_ip_address: str
+ """
+
+ _validation = {
+ 'public_ip_address': {'readonly': True},
+ 'private_ip_address': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+ 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ComputeInstanceConnectivityEndpoints, self).__init__(**kwargs)
+ self.public_ip_address = None
+ self.private_ip_address = None
+
+
+class ComputeInstanceCreatedBy(Model):
+ """Describes information on user who created this ComputeInstance.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ :ivar user_name: Name of the user. 
+ :vartype user_name: str
+ :ivar user_org_id: Uniquely identifies user's Azure Active Directory
+ organization.
+ :vartype user_org_id: str
+ :ivar user_id: Uniquely identifies the user within his/her organization.
+ :vartype user_id: str
+ """
+
+ _validation = {
+ 'user_name': {'readonly': True},
+ 'user_org_id': {'readonly': True},
+ 'user_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'user_name': {'key': 'userName', 'type': 'str'},
+ 'user_org_id': {'key': 'userOrgId', 'type': 'str'},
+ 'user_id': {'key': 'userId', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ComputeInstanceCreatedBy, self).__init__(**kwargs)
+ self.user_name = None
+ self.user_org_id = None
+ self.user_id = None
+
+
+class ComputeInstanceLastOperation(Model):
+ """The last operation on ComputeInstance.
+
+ :param operation_name: Name of the last operation. Possible values
+ include: 'Create', 'Start', 'Stop', 'Restart', 'Reimage', 'Delete'
+ :type operation_name: str or
+ ~azure.mgmt.machinelearningservices.models.OperationName
+ :param operation_time: Time of the last operation.
+ :type operation_time: datetime
+ :param operation_status: Operation status. Possible values include:
+ 'InProgress', 'Succeeded', 'CreateFailed', 'StartFailed', 'StopFailed',
+ 'RestartFailed', 'ReimageFailed', 'DeleteFailed'
+ :type operation_status: str or
+ ~azure.mgmt.machinelearningservices.models.OperationStatus
+ """
+
+ _attribute_map = {
+ 'operation_name': {'key': 'operationName', 'type': 'str'},
+ 'operation_time': {'key': 'operationTime', 'type': 'iso-8601'},
+ 'operation_status': {'key': 'operationStatus', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(ComputeInstanceLastOperation, self).__init__(**kwargs)
+ self.operation_name = kwargs.get('operation_name', None)
+ self.operation_time = kwargs.get('operation_time', None)
+ self.operation_status = kwargs.get('operation_status', None)
+
+
+class ComputeInstanceProperties(Model):
+ """Compute Instance properties. 
+ + Variables are only populated by the server, and will be ignored when + sending a request. + + :param vm_size: Virtual Machine Size + :type vm_size: str + :param subnet: Subnet. Virtual network subnet resource ID the compute + nodes belong to. + :type subnet: ~azure.mgmt.machinelearningservices.models.ResourceId + :param application_sharing_policy: Sharing policy for applications on this + compute instance. Policy for sharing applications on this compute instance + among users of parent workspace. If Personal, only the creator can access + applications on this compute instance. When Shared, any workspace user can + access applications on this instance depending on his/her assigned role. + Possible values include: 'Personal', 'Shared'. Default value: "Shared" . + :type application_sharing_policy: str or + ~azure.mgmt.machinelearningservices.models.ApplicationSharingPolicy + :param ssh_settings: Specifies policy and settings for SSH access. + :type ssh_settings: + ~azure.mgmt.machinelearningservices.models.ComputeInstanceSshSettings + :ivar connectivity_endpoints: Describes all connectivity endpoints + available for this ComputeInstance. + :vartype connectivity_endpoints: + ~azure.mgmt.machinelearningservices.models.ComputeInstanceConnectivityEndpoints + :ivar applications: Describes available applications and their endpoints + on this ComputeInstance. + :vartype applications: + list[~azure.mgmt.machinelearningservices.models.ComputeInstanceApplication] + :ivar created_by: Describes information on user who created this + ComputeInstance. + :vartype created_by: + ~azure.mgmt.machinelearningservices.models.ComputeInstanceCreatedBy + :ivar errors: Errors. Collection of errors encountered on this + ComputeInstance. + :vartype errors: + list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar state: The current state of this ComputeInstance. 
Possible values + include: 'Creating', 'CreateFailed', 'Deleting', 'Running', 'Restarting', + 'JobRunning', 'SettingUp', 'SetupFailed', 'Starting', 'Stopped', + 'Stopping', 'UserSettingUp', 'UserSetupFailed', 'Unknown', 'Unusable' + :vartype state: str or + ~azure.mgmt.machinelearningservices.models.ComputeInstanceState + :param compute_instance_authorization_type: Compute Instance Authorization + type. The Compute Instance Authorization type. Available values are + personal (default). Possible values include: 'personal'. Default value: + "personal" . + :type compute_instance_authorization_type: str or + ~azure.mgmt.machinelearningservices.models.ComputeInstanceAuthorizationType + :param personal_compute_instance_settings: Personal Compute Instance + settings. Settings for a personal compute instance. + :type personal_compute_instance_settings: + ~azure.mgmt.machinelearningservices.models.PersonalComputeInstanceSettings + :param setup_scripts: Details of customized scripts to execute for setting + up the cluster. + :type setup_scripts: + ~azure.mgmt.machinelearningservices.models.SetupScripts + :ivar last_operation: The last operation on ComputeInstance. + :vartype last_operation: + ~azure.mgmt.machinelearningservices.models.ComputeInstanceLastOperation + :param schedules: The list of schedules to be applied on the compute + instance. 
+ :type schedules: + ~azure.mgmt.machinelearningservices.models.ComputeSchedules + """ + + _validation = { + 'connectivity_endpoints': {'readonly': True}, + 'applications': {'readonly': True}, + 'created_by': {'readonly': True}, + 'errors': {'readonly': True}, + 'state': {'readonly': True}, + 'last_operation': {'readonly': True}, + } + + _attribute_map = { + 'vm_size': {'key': 'vmSize', 'type': 'str'}, + 'subnet': {'key': 'subnet', 'type': 'ResourceId'}, + 'application_sharing_policy': {'key': 'applicationSharingPolicy', 'type': 'str'}, + 'ssh_settings': {'key': 'sshSettings', 'type': 'ComputeInstanceSshSettings'}, + 'connectivity_endpoints': {'key': 'connectivityEndpoints', 'type': 'ComputeInstanceConnectivityEndpoints'}, + 'applications': {'key': 'applications', 'type': '[ComputeInstanceApplication]'}, + 'created_by': {'key': 'createdBy', 'type': 'ComputeInstanceCreatedBy'}, + 'errors': {'key': 'errors', 'type': '[ErrorResponse]'}, + 'state': {'key': 'state', 'type': 'str'}, + 'compute_instance_authorization_type': {'key': 'computeInstanceAuthorizationType', 'type': 'str'}, + 'personal_compute_instance_settings': {'key': 'personalComputeInstanceSettings', 'type': 'PersonalComputeInstanceSettings'}, + 'setup_scripts': {'key': 'setupScripts', 'type': 'SetupScripts'}, + 'last_operation': {'key': 'lastOperation', 'type': 'ComputeInstanceLastOperation'}, + 'schedules': {'key': 'schedules', 'type': 'ComputeSchedules'}, + } + + def __init__(self, **kwargs): + super(ComputeInstanceProperties, self).__init__(**kwargs) + self.vm_size = kwargs.get('vm_size', None) + self.subnet = kwargs.get('subnet', None) + self.application_sharing_policy = kwargs.get('application_sharing_policy', "Shared") + self.ssh_settings = kwargs.get('ssh_settings', None) + self.connectivity_endpoints = None + self.applications = None + self.created_by = None + self.errors = None + self.state = None + self.compute_instance_authorization_type = kwargs.get('compute_instance_authorization_type', 
"personal") + self.personal_compute_instance_settings = kwargs.get('personal_compute_instance_settings', None) + self.setup_scripts = kwargs.get('setup_scripts', None) + self.last_operation = None + self.schedules = kwargs.get('schedules', None) + + +class ComputeInstanceSshSettings(Model): + """Specifies policy and settings for SSH access. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param ssh_public_access: Access policy for SSH. State of the public SSH + port. Possible values are: Disabled - Indicates that the public ssh port + is closed on this instance. Enabled - Indicates that the public ssh port + is open and accessible according to the VNet/subnet policy if applicable. + Possible values include: 'Enabled', 'Disabled'. Default value: "Disabled" + . + :type ssh_public_access: str or + ~azure.mgmt.machinelearningservices.models.SshPublicAccess + :ivar admin_user_name: Describes the admin user name. + :vartype admin_user_name: str + :ivar ssh_port: Describes the port for connecting through SSH. + :vartype ssh_port: int + :param admin_public_key: Specifies the SSH rsa public key file as a + string. Use "ssh-keygen -t rsa -b 2048" to generate your SSH key pairs. 
+ :type admin_public_key: str + """ + + _validation = { + 'admin_user_name': {'readonly': True}, + 'ssh_port': {'readonly': True}, + } + + _attribute_map = { + 'ssh_public_access': {'key': 'sshPublicAccess', 'type': 'str'}, + 'admin_user_name': {'key': 'adminUserName', 'type': 'str'}, + 'ssh_port': {'key': 'sshPort', 'type': 'int'}, + 'admin_public_key': {'key': 'adminPublicKey', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ComputeInstanceSshSettings, self).__init__(**kwargs) + self.ssh_public_access = kwargs.get('ssh_public_access', "Disabled") + self.admin_user_name = None + self.ssh_port = None + self.admin_public_key = kwargs.get('admin_public_key', None) + + +class ComputeNodesInformation(Model): + """Compute nodes information related to a Machine Learning compute. Might + differ for every type of compute. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar next_link: The continuation token. + :vartype next_link: str + :param compute_type: Required. Constant filled by server. + :type compute_type: str + """ + + _validation = { + 'next_link': {'readonly': True}, + 'compute_type': {'required': True}, + } + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ComputeNodesInformation, self).__init__(**kwargs) + self.next_link = None + self.compute_type = None + + +class ComputeResource(Resource): + """Machine Learning compute object wrapped into ARM resource envelope. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Fully qualified resource ID for the resource. 
Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param properties: Compute properties + :type properties: ~azure.mgmt.machinelearningservices.models.Compute + :param identity: The identity of the resource. + :type identity: ~azure.mgmt.machinelearningservices.models.Identity + :param location: Specifies the location of the resource. + :type location: str + :param tags: Contains resource tags defined as key/value pairs. + :type tags: dict[str, str] + :param sku: The sku of the workspace. + :type sku: ~azure.mgmt.machinelearningservices.models.Sku + :param system_data: + :type system_data: ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Compute'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, **kwargs): + super(ComputeResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + self.identity = kwargs.get('identity', None) + self.location = kwargs.get('location', None) + self.tags = kwargs.get('tags', None) + self.sku = kwargs.get('sku', None) + self.system_data = kwargs.get('system_data', None) + + +class ComputeSchedules(Model): + """The list of schedules to be applied on the computes. 
+ + :param compute_start_stop: The list of compute start stop schedules to be + applied. + :type compute_start_stop: + list[~azure.mgmt.machinelearningservices.models.ComputeStartStopSchedule] + """ + + _attribute_map = { + 'compute_start_stop': {'key': 'computeStartStop', 'type': '[ComputeStartStopSchedule]'}, + } + + def __init__(self, **kwargs): + super(ComputeSchedules, self).__init__(**kwargs) + self.compute_start_stop = kwargs.get('compute_start_stop', None) + + +class ComputeStartStopSchedule(Model): + """Compute start stop schedule properties. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Schedule id. + :vartype id: str + :ivar provisioning_status: The current deployment state of schedule. + Possible values include: 'Completed', 'Provisioning', 'Failed' + :vartype provisioning_status: str or + ~azure.mgmt.machinelearningservices.models.ProvisioningStatus + :param status: Possible values include: 'Enabled', 'Disabled' + :type status: str or + ~azure.mgmt.machinelearningservices.models.ScheduleStatus + :param trigger_type: Possible values include: 'Recurrence', 'Cron' + :type trigger_type: str or + ~azure.mgmt.machinelearningservices.models.TriggerType + :param action: Possible values include: 'Start', 'Stop' + :type action: str or + ~azure.mgmt.machinelearningservices.models.ComputePowerAction + :param recurrence: + :type recurrence: ~azure.mgmt.machinelearningservices.models.Recurrence + :param cron: + :type cron: ~azure.mgmt.machinelearningservices.models.Cron + """ + + _validation = { + 'id': {'readonly': True}, + 'provisioning_status': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'provisioning_status': {'key': 'provisioningStatus', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'trigger_type': {'key': 'triggerType', 'type': 'str'}, + 'action': {'key': 'action', 'type': 'str'}, + 'recurrence': {'key': 'recurrence', 'type': 
'Recurrence'}, + 'cron': {'key': 'cron', 'type': 'Cron'}, + } + + def __init__(self, **kwargs): + super(ComputeStartStopSchedule, self).__init__(**kwargs) + self.id = None + self.provisioning_status = None + self.status = kwargs.get('status', None) + self.trigger_type = kwargs.get('trigger_type', None) + self.action = kwargs.get('action', None) + self.recurrence = kwargs.get('recurrence', None) + self.cron = kwargs.get('cron', None) + + +class ContainerResourceRequirements(Model): + """The resource requirements for the container (cpu and memory). + + :param cpu: The minimum amount of CPU cores to be used by the container. + More info: + https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/ + :type cpu: float + :param cpu_limit: The maximum amount of CPU cores allowed to be used by + the container. More info: + https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/ + :type cpu_limit: float + :param memory_in_gb: The minimum amount of memory (in GB) to be used by + the container. More info: + https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/ + :type memory_in_gb: float + :param memory_in_gb_limit: The maximum amount of memory (in GB) allowed to + be used by the container. More info: + https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/ + :type memory_in_gb_limit: float + :param gpu: The number of GPU cores in the container. + :type gpu: int + :param fpga: The number of FPGA PCIE devices exposed to the container. + Must be multiple of 2. 
+ :type fpga: int + """ + + _attribute_map = { + 'cpu': {'key': 'cpu', 'type': 'float'}, + 'cpu_limit': {'key': 'cpuLimit', 'type': 'float'}, + 'memory_in_gb': {'key': 'memoryInGB', 'type': 'float'}, + 'memory_in_gb_limit': {'key': 'memoryInGBLimit', 'type': 'float'}, + 'gpu': {'key': 'gpu', 'type': 'int'}, + 'fpga': {'key': 'fpga', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(ContainerResourceRequirements, self).__init__(**kwargs) + self.cpu = kwargs.get('cpu', None) + self.cpu_limit = kwargs.get('cpu_limit', None) + self.memory_in_gb = kwargs.get('memory_in_gb', None) + self.memory_in_gb_limit = kwargs.get('memory_in_gb_limit', None) + self.gpu = kwargs.get('gpu', None) + self.fpga = kwargs.get('fpga', None) + + +class CosmosDbSettings(Model): + """CosmosDbSettings. + + :param collections_throughput: The throughput of the collections in + cosmosdb database + :type collections_throughput: int + """ + + _attribute_map = { + 'collections_throughput': {'key': 'collectionsThroughput', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(CosmosDbSettings, self).__init__(**kwargs) + self.collections_throughput = kwargs.get('collections_throughput', None) + + +class Cron(Model): + """The workflow trigger cron for ComputeStartStop schedule type. + + :param start_time: The start time. + :type start_time: str + :param time_zone: The time zone. + :type time_zone: str + :param expression: The cron expression. + :type expression: str + """ + + _attribute_map = { + 'start_time': {'key': 'startTime', 'type': 'str'}, + 'time_zone': {'key': 'timeZone', 'type': 'str'}, + 'expression': {'key': 'expression', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(Cron, self).__init__(**kwargs) + self.start_time = kwargs.get('start_time', None) + self.time_zone = kwargs.get('time_zone', None) + self.expression = kwargs.get('expression', None) + + +class CsvExportSummary(ExportSummary): + """CsvExportSummary. 
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :ivar end_time_utc: The time when the export was completed.
+ :vartype end_time_utc: datetime
+ :ivar exported_row_count: The total number of labeled datapoints exported.
+ :vartype exported_row_count: long
+ :ivar labeling_job_id: Name and identifier of the job containing exported
+ labels.
+ :vartype labeling_job_id: str
+ :ivar start_time_utc: The time when the export was requested.
+ :vartype start_time_utc: datetime
+ :param format: Required. Constant filled by server.
+ :type format: str
+ :ivar container_name: The container name to which the labels will be
+ exported.
+ :vartype container_name: str
+ :ivar snapshot_path: The output path where the labels will be exported.
+ :vartype snapshot_path: str
+ """
+
+ _validation = {
+ 'end_time_utc': {'readonly': True},
+ 'exported_row_count': {'readonly': True},
+ 'labeling_job_id': {'readonly': True},
+ 'start_time_utc': {'readonly': True},
+ 'format': {'required': True},
+ 'container_name': {'readonly': True},
+ 'snapshot_path': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
+ 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'},
+ 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'},
+ 'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
+ 'format': {'key': 'format', 'type': 'str'},
+ 'container_name': {'key': 'containerName', 'type': 'str'},
+ 'snapshot_path': {'key': 'snapshotPath', 'type': 'str'},
+ }
+
+ def __init__(self, **kwargs):
+ super(CsvExportSummary, self).__init__(**kwargs)
+ self.container_name = None
+ self.snapshot_path = None
+ self.format = 'CSV'
+
+
+class Databricks(Compute):
+ """A Databricks compute.
+
+ Variables are only populated by the server, and will be ignored when
+ sending a request. 
+ + All required parameters must be populated in order to send to Azure. + + :param compute_location: Location for the underlying compute + :type compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values + are Unknown, Updating, Provisioning, Succeeded, and Failed. Possible + values include: 'Unknown', 'Updating', 'Creating', 'Deleting', + 'Succeeded', 'Failed', 'Canceled' + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ProvisioningState + :param description: The description of the Machine Learning compute. + :type description: str + :ivar created_on: The time at which the compute was created. + :vartype created_on: datetime + :ivar modified_on: The time at which the compute was last modified. + :vartype modified_on: datetime + :param resource_id: ARM resource id of the underlying compute + :type resource_id: str + :ivar provisioning_errors: Errors during provisioning + :vartype provisioning_errors: + list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar is_attached_compute: Indicating whether the compute was provisioned + by user and brought from outside if true, or machine learning service + provisioned it if false. + :vartype is_attached_compute: bool + :param disable_local_auth: Opt-out of local authentication and ensure + customers can use only MSI and AAD exclusively for authentication. + :type disable_local_auth: bool + :param compute_type: Required. Constant filled by server. 
+ :type compute_type: str + :param properties: + :type properties: + ~azure.mgmt.machinelearningservices.models.DatabricksProperties + """ + + _validation = { + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + 'compute_type': {'required': True}, + } + + _attribute_map = { + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'DatabricksProperties'}, + } + + def __init__(self, **kwargs): + super(Databricks, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + self.compute_type = 'Databricks' + + +class DatabricksComputeSecrets(ComputeSecrets): + """Secrets related to a Machine Learning compute based on Databricks. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. Constant filled by server. + :type compute_type: str + :param databricks_access_token: access token for databricks account. 
+ :type databricks_access_token: str + """ + + _validation = { + 'compute_type': {'required': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DatabricksComputeSecrets, self).__init__(**kwargs) + self.databricks_access_token = kwargs.get('databricks_access_token', None) + self.compute_type = 'Databricks' + + +class DatabricksProperties(Model): + """DatabricksProperties. + + :param databricks_access_token: Databricks access token + :type databricks_access_token: str + :param workspace_url: Workspace Url + :type workspace_url: str + """ + + _attribute_map = { + 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, + 'workspace_url': {'key': 'workspaceUrl', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DatabricksProperties, self).__init__(**kwargs) + self.databricks_access_token = kwargs.get('databricks_access_token', None) + self.workspace_url = kwargs.get('workspace_url', None) + + +class DataContainer(Model): + """Container for data asset versions. + + :param description: The asset description text. + :type description: str + :param properties: The asset property dictionary. + :type properties: dict[str, str] + :param tags: Tag dictionary. Tags can be added, removed, and updated. + :type tags: dict[str, str] + """ + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, **kwargs): + super(DataContainer, self).__init__(**kwargs) + self.description = kwargs.get('description', None) + self.properties = kwargs.get('properties', None) + self.tags = kwargs.get('tags', None) + + +class DataContainerResource(Resource): + """Azure Resource Manager resource envelope. 
+ + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param properties: Required. Additional attributes of the entity. + :type properties: ~azure.mgmt.machinelearningservices.models.DataContainer + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'DataContainer'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, **kwargs): + super(DataContainerResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + self.system_data = None + + +class DataFactory(Compute): + """A DataFactory compute. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_location: Location for the underlying compute + :type compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values + are Unknown, Updating, Provisioning, Succeeded, and Failed. 
Possible + values include: 'Unknown', 'Updating', 'Creating', 'Deleting', + 'Succeeded', 'Failed', 'Canceled' + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ProvisioningState + :param description: The description of the Machine Learning compute. + :type description: str + :ivar created_on: The time at which the compute was created. + :vartype created_on: datetime + :ivar modified_on: The time at which the compute was last modified. + :vartype modified_on: datetime + :param resource_id: ARM resource id of the underlying compute + :type resource_id: str + :ivar provisioning_errors: Errors during provisioning + :vartype provisioning_errors: + list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar is_attached_compute: Indicating whether the compute was provisioned + by user and brought from outside if true, or machine learning service + provisioned it if false. + :vartype is_attached_compute: bool + :param disable_local_auth: Opt-out of local authentication and ensure + customers can use only MSI and AAD exclusively for authentication. + :type disable_local_auth: bool + :param compute_type: Required. Constant filled by server. 
+ :type compute_type: str + """ + + _validation = { + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + 'compute_type': {'required': True}, + } + + _attribute_map = { + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DataFactory, self).__init__(**kwargs) + self.compute_type = 'DataFactory' + + +class DataLakeAnalytics(Compute): + """A DataLakeAnalytics compute. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_location: Location for the underlying compute + :type compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values + are Unknown, Updating, Provisioning, Succeeded, and Failed. Possible + values include: 'Unknown', 'Updating', 'Creating', 'Deleting', + 'Succeeded', 'Failed', 'Canceled' + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ProvisioningState + :param description: The description of the Machine Learning compute. + :type description: str + :ivar created_on: The time at which the compute was created. 
+ :vartype created_on: datetime + :ivar modified_on: The time at which the compute was last modified. + :vartype modified_on: datetime + :param resource_id: ARM resource id of the underlying compute + :type resource_id: str + :ivar provisioning_errors: Errors during provisioning + :vartype provisioning_errors: + list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar is_attached_compute: Indicating whether the compute was provisioned + by user and brought from outside if true, or machine learning service + provisioned it if false. + :vartype is_attached_compute: bool + :param disable_local_auth: Opt-out of local authentication and ensure + customers can use only MSI and AAD exclusively for authentication. + :type disable_local_auth: bool + :param compute_type: Required. Constant filled by server. + :type compute_type: str + :param properties: + :type properties: + ~azure.mgmt.machinelearningservices.models.DataLakeAnalyticsProperties + """ + + _validation = { + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + 'compute_type': {'required': True}, + } + + _attribute_map = { + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'DataLakeAnalyticsProperties'}, + } + + def 
__init__(self, **kwargs): + super(DataLakeAnalytics, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + self.compute_type = 'DataLakeAnalytics' + + +class DataLakeAnalyticsProperties(Model): + """DataLakeAnalyticsProperties. + + :param data_lake_store_account_name: DataLake Store Account Name + :type data_lake_store_account_name: str + """ + + _attribute_map = { + 'data_lake_store_account_name': {'key': 'dataLakeStoreAccountName', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DataLakeAnalyticsProperties, self).__init__(**kwargs) + self.data_lake_store_account_name = kwargs.get('data_lake_store_account_name', None) + + +class DataPathAssetReference(AssetReferenceBase): + """Reference to an asset via its path in a datastore. + + All required parameters must be populated in order to send to Azure. + + :param reference_type: Required. Constant filled by server. + :type reference_type: str + :param datastore_id: ARM resource ID of the datastore where the asset is + located. + :type datastore_id: str + :param path: The path of the file/directory in the datastore. + :type path: str + """ + + _validation = { + 'reference_type': {'required': True}, + } + + _attribute_map = { + 'reference_type': {'key': 'referenceType', 'type': 'str'}, + 'datastore_id': {'key': 'datastoreId', 'type': 'str'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DataPathAssetReference, self).__init__(**kwargs) + self.datastore_id = kwargs.get('datastore_id', None) + self.path = kwargs.get('path', None) + self.reference_type = 'DataPath' + + +class DatasetExportSummary(ExportSummary): + """DatasetExportSummary. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar end_time_utc: The time when the export was completed. 
+ :vartype end_time_utc: datetime + :ivar exported_row_count: The total number of labeled datapoints exported. + :vartype exported_row_count: long + :ivar labeling_job_id: Name and identifier of the job containing exported + labels. + :vartype labeling_job_id: str + :ivar start_time_utc: The time when the export was requested. + :vartype start_time_utc: datetime + :param format: Required. Constant filled by server. + :type format: str + :ivar labeled_asset_name: The unique name of the labeled data asset. + :vartype labeled_asset_name: str + """ + + _validation = { + 'end_time_utc': {'readonly': True}, + 'exported_row_count': {'readonly': True}, + 'labeling_job_id': {'readonly': True}, + 'start_time_utc': {'readonly': True}, + 'format': {'required': True}, + 'labeled_asset_name': {'readonly': True}, + } + + _attribute_map = { + 'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'}, + 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'}, + 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'}, + 'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'}, + 'format': {'key': 'format', 'type': 'str'}, + 'labeled_asset_name': {'key': 'labeledAssetName', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DatasetExportSummary, self).__init__(**kwargs) + self.labeled_asset_name = None + self.format = 'Dataset' + + +class DatastoreProperties(Model): + """Datastore definition. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param contents: Required. Reference to the datastore storage contents. + :type contents: + ~azure.mgmt.machinelearningservices.models.DatastoreContents + :param description: The asset description text. + :type description: str + :ivar has_been_validated: Whether the service has validated access to the + datastore with the provided credentials. 
+ :vartype has_been_validated: bool + :param is_default: Whether this datastore is the default for the + workspace. + :type is_default: bool + :param linked_info: Information about the datastore origin, if linked. + :type linked_info: ~azure.mgmt.machinelearningservices.models.LinkedInfo + :param properties: The asset property dictionary. + :type properties: dict[str, str] + :param tags: Tag dictionary. Tags can be added, removed, and updated. + :type tags: dict[str, str] + """ + + _validation = { + 'contents': {'required': True}, + 'has_been_validated': {'readonly': True}, + } + + _attribute_map = { + 'contents': {'key': 'contents', 'type': 'DatastoreContents'}, + 'description': {'key': 'description', 'type': 'str'}, + 'has_been_validated': {'key': 'hasBeenValidated', 'type': 'bool'}, + 'is_default': {'key': 'isDefault', 'type': 'bool'}, + 'linked_info': {'key': 'linkedInfo', 'type': 'LinkedInfo'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, **kwargs): + super(DatastoreProperties, self).__init__(**kwargs) + self.contents = kwargs.get('contents', None) + self.description = kwargs.get('description', None) + self.has_been_validated = None + self.is_default = kwargs.get('is_default', None) + self.linked_info = kwargs.get('linked_info', None) + self.properties = kwargs.get('properties', None) + self.tags = kwargs.get('tags', None) + + +class DatastorePropertiesResource(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. 
Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param properties: Required. Additional attributes of the entity. + :type properties: + ~azure.mgmt.machinelearningservices.models.DatastoreProperties + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'DatastoreProperties'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, **kwargs): + super(DatastorePropertiesResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + self.system_data = None + + +class DataVersion(Model): + """Data asset version details. + + All required parameters must be populated in order to send to Azure. + + :param dataset_type: The Format of dataset. Possible values include: + 'Simple', 'Dataflow' + :type dataset_type: str or + ~azure.mgmt.machinelearningservices.models.DatasetType + :param datastore_id: ARM resource ID of the datastore where the asset is + located. + :type datastore_id: str + :param description: The asset description text. + :type description: str + :param is_anonymous: If the name version are system generated (anonymous + registration). + :type is_anonymous: bool + :param path: Required. 
The path of the file/directory in the datastore. + :type path: str + :param properties: The asset property dictionary. + :type properties: dict[str, str] + :param tags: Tag dictionary. Tags can be added, removed, and updated. + :type tags: dict[str, str] + """ + + _validation = { + 'path': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'dataset_type': {'key': 'datasetType', 'type': 'str'}, + 'datastore_id': {'key': 'datastoreId', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, + 'path': {'key': 'path', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, **kwargs): + super(DataVersion, self).__init__(**kwargs) + self.dataset_type = kwargs.get('dataset_type', None) + self.datastore_id = kwargs.get('datastore_id', None) + self.description = kwargs.get('description', None) + self.is_anonymous = kwargs.get('is_anonymous', None) + self.path = kwargs.get('path', None) + self.properties = kwargs.get('properties', None) + self.tags = kwargs.get('tags', None) + + +class DataVersionResource(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param properties: Required. Additional attributes of the entity. 
+ :type properties: ~azure.mgmt.machinelearningservices.models.DataVersion + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'DataVersion'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, **kwargs): + super(DataVersionResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + self.system_data = None + + +class DeploymentLogs(Model): + """DeploymentLogs. + + :param content: The retrieved online deployment logs. + :type content: str + """ + + _attribute_map = { + 'content': {'key': 'content', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DeploymentLogs, self).__init__(**kwargs) + self.content = kwargs.get('content', None) + + +class DeploymentLogsRequest(Model): + """DeploymentLogsRequest. + + :param container_type: The type of container to retrieve logs from. + Possible values include: 'StorageInitializer', 'InferenceServer' + :type container_type: str or + ~azure.mgmt.machinelearningservices.models.ContainerType + :param tail: The maximum number of lines to tail. + :type tail: int + """ + + _attribute_map = { + 'container_type': {'key': 'containerType', 'type': 'str'}, + 'tail': {'key': 'tail', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(DeploymentLogsRequest, self).__init__(**kwargs) + self.container_type = kwargs.get('container_type', None) + self.tail = kwargs.get('tail', None) + + +class DistributionConfiguration(Model): + """Base definition for job distribution configuration. 
+ + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: Mpi, PyTorch, TensorFlow + + All required parameters must be populated in order to send to Azure. + + :param distribution_type: Required. Constant filled by server. + :type distribution_type: str + """ + + _validation = { + 'distribution_type': {'required': True}, + } + + _attribute_map = { + 'distribution_type': {'key': 'distributionType', 'type': 'str'}, + } + + _subtype_map = { + 'distribution_type': {'Mpi': 'Mpi', 'PyTorch': 'PyTorch', 'TensorFlow': 'TensorFlow'} + } + + def __init__(self, **kwargs): + super(DistributionConfiguration, self).__init__(**kwargs) + self.distribution_type = None + + +class DockerSpecification(Model): + """Configuration settings for Docker. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: DockerBuild, DockerImage + + All required parameters must be populated in order to send to Azure. + + :param platform: The platform information of the docker image. + :type platform: + ~azure.mgmt.machinelearningservices.models.DockerImagePlatform + :param docker_specification_type: Required. Constant filled by server. + :type docker_specification_type: str + """ + + _validation = { + 'docker_specification_type': {'required': True}, + } + + _attribute_map = { + 'platform': {'key': 'platform', 'type': 'DockerImagePlatform'}, + 'docker_specification_type': {'key': 'dockerSpecificationType', 'type': 'str'}, + } + + _subtype_map = { + 'docker_specification_type': {'Build': 'DockerBuild', 'Image': 'DockerImage'} + } + + def __init__(self, **kwargs): + super(DockerSpecification, self).__init__(**kwargs) + self.platform = kwargs.get('platform', None) + self.docker_specification_type = None + + +class DockerBuild(DockerSpecification): + """Class to represent configuration settings for Docker Build. + + All required parameters must be populated in order to send to Azure. 
+ + :param platform: The platform information of the docker image. + :type platform: + ~azure.mgmt.machinelearningservices.models.DockerImagePlatform + :param docker_specification_type: Required. Constant filled by server. + :type docker_specification_type: str + :param context: Path to a snapshot of the Docker Context. This property is + only valid if Dockerfile is specified. + The path is relative to the asset path which must contain a single Blob + URI value. + + :type context: str + :param dockerfile: Required. Docker command line instructions to assemble + an image. + + :type dockerfile: str + """ + + _validation = { + 'docker_specification_type': {'required': True}, + 'dockerfile': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'platform': {'key': 'platform', 'type': 'DockerImagePlatform'}, + 'docker_specification_type': {'key': 'dockerSpecificationType', 'type': 'str'}, + 'context': {'key': 'context', 'type': 'str'}, + 'dockerfile': {'key': 'dockerfile', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DockerBuild, self).__init__(**kwargs) + self.context = kwargs.get('context', None) + self.dockerfile = kwargs.get('dockerfile', None) + self.docker_specification_type = 'Build' + + +class DockerImage(DockerSpecification): + """Class to represent configuration settings for Docker Build. + + All required parameters must be populated in order to send to Azure. + + :param platform: The platform information of the docker image. + :type platform: + ~azure.mgmt.machinelearningservices.models.DockerImagePlatform + :param docker_specification_type: Required. Constant filled by server. + :type docker_specification_type: str + :param docker_image_uri: Required. Image name of a custom base image. 
+ + :type docker_image_uri: str + """ + + _validation = { + 'docker_specification_type': {'required': True}, + 'docker_image_uri': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'platform': {'key': 'platform', 'type': 'DockerImagePlatform'}, + 'docker_specification_type': {'key': 'dockerSpecificationType', 'type': 'str'}, + 'docker_image_uri': {'key': 'dockerImageUri', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DockerImage, self).__init__(**kwargs) + self.docker_image_uri = kwargs.get('docker_image_uri', None) + self.docker_specification_type = 'Image' + + +class DockerImagePlatform(Model): + """DockerImagePlatform. + + :param operating_system_type: The OS type the Environment. Possible values + include: 'Linux', 'Windows' + :type operating_system_type: str or + ~azure.mgmt.machinelearningservices.models.OperatingSystemType + """ + + _attribute_map = { + 'operating_system_type': {'key': 'operatingSystemType', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(DockerImagePlatform, self).__init__(**kwargs) + self.operating_system_type = kwargs.get('operating_system_type', None) + + +class EncryptionProperty(Model): + """EncryptionProperty. + + All required parameters must be populated in order to send to Azure. + + :param status: Required. Indicates whether or not the encryption is + enabled for the workspace. Possible values include: 'Enabled', 'Disabled' + :type status: str or + ~azure.mgmt.machinelearningservices.models.EncryptionStatus + :param identity: The identity that will be used to access the key vault + for encryption at rest. + :type identity: ~azure.mgmt.machinelearningservices.models.IdentityForCmk + :param key_vault_properties: Required. Customer Key vault properties. 
    :type key_vault_properties:
     ~azure.mgmt.machinelearningservices.models.KeyVaultProperties
    """

    _validation = {
        'status': {'required': True},
        'key_vault_properties': {'required': True},
    }

    _attribute_map = {
        'status': {'key': 'status', 'type': 'str'},
        'identity': {'key': 'identity', 'type': 'IdentityForCmk'},
        'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'KeyVaultProperties'},
    }

    def __init__(self, **kwargs):
        super(EncryptionProperty, self).__init__(**kwargs)
        self.status = kwargs.get('status', None)
        self.identity = kwargs.get('identity', None)
        self.key_vault_properties = kwargs.get('key_vault_properties', None)


class EndpointAuthKeys(Model):
    """Keys for endpoint authentication.

    :param primary_key: The primary key.
    :type primary_key: str
    :param secondary_key: The secondary key.
    :type secondary_key: str
    """

    _attribute_map = {
        'primary_key': {'key': 'primaryKey', 'type': 'str'},
        'secondary_key': {'key': 'secondaryKey', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(EndpointAuthKeys, self).__init__(**kwargs)
        self.primary_key = kwargs.get('primary_key', None)
        self.secondary_key = kwargs.get('secondary_key', None)


class EndpointAuthToken(Model):
    """Service Token.

    :param access_token: Access token.
    :type access_token: str
    :param expiry_time_utc: Access token expiry time (UTC).
    :type expiry_time_utc: long
    :param refresh_after_time_utc: Refresh access token after time (UTC).
    :type refresh_after_time_utc: long
    :param token_type: Access token type.
    :type token_type: str
    """

    _attribute_map = {
        'access_token': {'key': 'accessToken', 'type': 'str'},
        'expiry_time_utc': {'key': 'expiryTimeUtc', 'type': 'long'},
        'refresh_after_time_utc': {'key': 'refreshAfterTimeUtc', 'type': 'long'},
        'token_type': {'key': 'tokenType', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(EndpointAuthToken, self).__init__(**kwargs)
        self.access_token = kwargs.get('access_token', None)
        self.expiry_time_utc = kwargs.get('expiry_time_utc', None)
        self.refresh_after_time_utc = kwargs.get('refresh_after_time_utc', None)
        self.token_type = kwargs.get('token_type', None)


class EnvironmentContainer(Model):
    """Container for environment specification versions.

    :param description: The asset description text.
    :type description: str
    :param properties: The asset property dictionary.
    :type properties: dict[str, str]
    :param tags: Tag dictionary. Tags can be added, removed, and updated.
    :type tags: dict[str, str]
    """

    _attribute_map = {
        'description': {'key': 'description', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        super(EnvironmentContainer, self).__init__(**kwargs)
        self.description = kwargs.get('description', None)
        self.properties = kwargs.get('properties', None)
        self.tags = kwargs.get('tags', None)


class EnvironmentContainerResource(Resource):
    """Azure Resource Manager resource envelope.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
    :vartype id: str
    :ivar name: The name of the resource
    :vartype name: str
    :ivar type: The type of the resource. E.g.
     "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
    :vartype type: str
    :param properties: Required. Additional attributes of the entity.
    :type properties:
     ~azure.mgmt.machinelearningservices.models.EnvironmentContainer
    :ivar system_data: System data associated with resource provider
    :vartype system_data:
     ~azure.mgmt.machinelearningservices.models.SystemData
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'properties': {'required': True},
        'system_data': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'EnvironmentContainer'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
    }

    def __init__(self, **kwargs):
        super(EnvironmentContainerResource, self).__init__(**kwargs)
        self.properties = kwargs.get('properties', None)
        # Read-only (see _validation); populated by the server on responses.
        self.system_data = None


class EnvironmentSpecificationVersion(Model):
    """Environment specification version details.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :param conda_file: Standard configuration file used by Conda that lets you
     install any kind of package, including Python, R, and C/C++ packages.
    :type conda_file: str
    :param description: The asset description text.
    :type description: str
    :param docker: Configuration settings for Docker.
    :type docker:
     ~azure.mgmt.machinelearningservices.models.DockerSpecification
    :ivar environment_specification_type: Environment specification is either
     user managed or curated by the Azure ML service. Possible values include:
     'Curated', 'UserCreated'
    :vartype environment_specification_type: str or
     ~azure.mgmt.machinelearningservices.models.EnvironmentSpecificationType
    :param inference_container_properties: Defines configuration specific to
     inference.
    :type inference_container_properties:
     ~azure.mgmt.machinelearningservices.models.InferenceContainerProperties
    :param is_anonymous: If the name version are system generated (anonymous
     registration).
    :type is_anonymous: bool
    :param properties: The asset property dictionary.
    :type properties: dict[str, str]
    :param tags: Tag dictionary. Tags can be added, removed, and updated.
    :type tags: dict[str, str]
    """

    _validation = {
        'environment_specification_type': {'readonly': True},
    }

    _attribute_map = {
        'conda_file': {'key': 'condaFile', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'docker': {'key': 'docker', 'type': 'DockerSpecification'},
        'environment_specification_type': {'key': 'environmentSpecificationType', 'type': 'str'},
        'inference_container_properties': {'key': 'inferenceContainerProperties', 'type': 'InferenceContainerProperties'},
        'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        super(EnvironmentSpecificationVersion, self).__init__(**kwargs)
        self.conda_file = kwargs.get('conda_file', None)
        self.description = kwargs.get('description', None)
        self.docker = kwargs.get('docker', None)
        # Read-only (see _validation); populated by the server on responses.
        self.environment_specification_type = None
        self.inference_container_properties = kwargs.get('inference_container_properties', None)
        self.is_anonymous = kwargs.get('is_anonymous', None)
        self.properties = kwargs.get('properties', None)
        self.tags = kwargs.get('tags', None)


class EnvironmentSpecificationVersionResource(Resource):
    """Azure Resource Manager resource envelope.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
    :vartype id: str
    :ivar name: The name of the resource
    :vartype name: str
    :ivar type: The type of the resource. E.g.
     "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
    :vartype type: str
    :param properties: Required. Additional attributes of the entity.
    :type properties:
     ~azure.mgmt.machinelearningservices.models.EnvironmentSpecificationVersion
    :ivar system_data: System data associated with resource provider
    :vartype system_data:
     ~azure.mgmt.machinelearningservices.models.SystemData
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'properties': {'required': True},
        'system_data': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'EnvironmentSpecificationVersion'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
    }

    def __init__(self, **kwargs):
        super(EnvironmentSpecificationVersionResource, self).__init__(**kwargs)
        self.properties = kwargs.get('properties', None)
        # Read-only (see _validation); populated by the server on responses.
        self.system_data = None


class ErrorAdditionalInfo(Model):
    """The resource management error additional info.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar type: The additional info type.
    :vartype type: str
    :ivar info: The additional info.
    :vartype info: object
    """

    _validation = {
        'type': {'readonly': True},
        'info': {'readonly': True},
    }

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'info': {'key': 'info', 'type': 'object'},
    }

    def __init__(self, **kwargs):
        super(ErrorAdditionalInfo, self).__init__(**kwargs)
        # Both fields are read-only (see _validation); populated by the server.
        self.type = None
        self.info = None


class ErrorDetail(Model):
    """The error detail.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar code: The error code.
    :vartype code: str
    :ivar message: The error message.
    :vartype message: str
    :ivar target: The error target.
    :vartype target: str
    :ivar details: The error details.
    :vartype details:
     list[~azure.mgmt.machinelearningservices.models.ErrorDetail]
    :ivar additional_info: The error additional info.
    :vartype additional_info:
     list[~azure.mgmt.machinelearningservices.models.ErrorAdditionalInfo]
    """

    _validation = {
        'code': {'readonly': True},
        'message': {'readonly': True},
        'target': {'readonly': True},
        'details': {'readonly': True},
        'additional_info': {'readonly': True},
    }

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
        'details': {'key': 'details', 'type': '[ErrorDetail]'},
        'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'},
    }

    def __init__(self, **kwargs):
        super(ErrorDetail, self).__init__(**kwargs)
        # All fields are read-only (see _validation); populated by the server.
        self.code = None
        self.message = None
        self.target = None
        self.details = None
        self.additional_info = None


class ErrorResponse(Model):
    """Error response.

    Common error response for all Azure Resource Manager APIs to return error
    details for failed operations. (This also follows the OData error response
    format.).

    :param error: The error object.
    :type error: ~azure.mgmt.machinelearningservices.models.ErrorDetail
    """

    _attribute_map = {
        'error': {'key': 'error', 'type': 'ErrorDetail'},
    }

    def __init__(self, **kwargs):
        super(ErrorResponse, self).__init__(**kwargs)
        self.error = kwargs.get('error', None)


class ErrorResponseException(HttpOperationError):
    """Server responded with exception of type: 'ErrorResponse'.

    :param deserialize: A deserializer
    :param response: Server response to be deserialized.
    """

    def __init__(self, deserialize, response, *args):

        super(ErrorResponseException, self).__init__(deserialize, response, 'ErrorResponse', *args)


class EstimatedVMPrice(Model):
    """The estimated price info for using a VM of a particular OS type, tier, etc.

    All required parameters must be populated in order to send to Azure.

    :param retail_price: Required. Retail price. The price charged for using
     the VM.
    :type retail_price: float
    :param os_type: Required. OS type. Operating system type used by the VM.
     Possible values include: 'Linux', 'Windows'
    :type os_type: str or
     ~azure.mgmt.machinelearningservices.models.VMPriceOSType
    :param vm_tier: Required. VM tier. The type of the VM. Possible values
     include: 'Standard', 'LowPriority', 'Spot'
    :type vm_tier: str or ~azure.mgmt.machinelearningservices.models.VMTier
    """

    _validation = {
        'retail_price': {'required': True},
        'os_type': {'required': True},
        'vm_tier': {'required': True},
    }

    _attribute_map = {
        'retail_price': {'key': 'retailPrice', 'type': 'float'},
        'os_type': {'key': 'osType', 'type': 'str'},
        'vm_tier': {'key': 'vmTier', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(EstimatedVMPrice, self).__init__(**kwargs)
        self.retail_price = kwargs.get('retail_price', None)
        self.os_type = kwargs.get('os_type', None)
        self.vm_tier = kwargs.get('vm_tier', None)


class EstimatedVMPrices(Model):
    """The estimated price info for using a VM.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar billing_currency: Required. Billing currency. Three lettered code
     specifying the currency of the VM price. Example: USD. Default value:
     "USD".
    :vartype billing_currency: str
    :ivar unit_of_measure: Required. Unit of time measure. The unit of time
     measurement for the specified VM price. Example: OneHour. Default value:
     "OneHour".
    :vartype unit_of_measure: str
    :param values: Required. List of estimated VM prices. The list of
     estimated prices for using a VM of a particular OS type, tier, etc.
    :type values:
     list[~azure.mgmt.machinelearningservices.models.EstimatedVMPrice]
    """

    _validation = {
        'billing_currency': {'required': True, 'constant': True},
        'unit_of_measure': {'required': True, 'constant': True},
        'values': {'required': True},
    }

    _attribute_map = {
        'billing_currency': {'key': 'billingCurrency', 'type': 'str'},
        'unit_of_measure': {'key': 'unitOfMeasure', 'type': 'str'},
        'values': {'key': 'values', 'type': '[EstimatedVMPrice]'},
    }

    # Class-level attributes for fields marked 'constant' in _validation:
    # always serialized with these fixed values, never set per instance.
    billing_currency = "USD"

    unit_of_measure = "OneHour"

    def __init__(self, **kwargs):
        super(EstimatedVMPrices, self).__init__(**kwargs)
        self.values = kwargs.get('values', None)


class FlavorData(Model):
    """FlavorData.

    :param data: Model flavor-specific data.
    :type data: dict[str, str]
    """

    _attribute_map = {
        'data': {'key': 'data', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        super(FlavorData, self).__init__(**kwargs)
        self.data = kwargs.get('data', None)


class GlusterFsContents(DatastoreContents):
    """GlusterFs datastore configuration.

    All required parameters must be populated in order to send to Azure.

    :param contents_type: Required. Constant filled by server.
    :type contents_type: str
    :param server_address: Required.
     GlusterFS server address (can be the IP
     address or server name).
    :type server_address: str
    :param volume_name: Required. GlusterFS volume name.
    :type volume_name: str
    """

    _validation = {
        'contents_type': {'required': True},
        'server_address': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
        'volume_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'contents_type': {'key': 'contentsType', 'type': 'str'},
        'server_address': {'key': 'serverAddress', 'type': 'str'},
        'volume_name': {'key': 'volumeName', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(GlusterFsContents, self).__init__(**kwargs)
        self.server_address = kwargs.get('server_address', None)
        self.volume_name = kwargs.get('volume_name', None)
        # Polymorphic discriminator value for this subtype.
        self.contents_type = 'GlusterFs'


class HDInsight(Compute):
    """A HDInsight compute.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :param compute_location: Location for the underlying compute
    :type compute_location: str
    :ivar provisioning_state: The provision state of the cluster. Valid values
     are Unknown, Updating, Provisioning, Succeeded, and Failed. Possible
     values include: 'Unknown', 'Updating', 'Creating', 'Deleting',
     'Succeeded', 'Failed', 'Canceled'
    :vartype provisioning_state: str or
     ~azure.mgmt.machinelearningservices.models.ProvisioningState
    :param description: The description of the Machine Learning compute.
    :type description: str
    :ivar created_on: The time at which the compute was created.
    :vartype created_on: datetime
    :ivar modified_on: The time at which the compute was last modified.
    :vartype modified_on: datetime
    :param resource_id: ARM resource id of the underlying compute
    :type resource_id: str
    :ivar provisioning_errors: Errors during provisioning
    :vartype provisioning_errors:
     list[~azure.mgmt.machinelearningservices.models.ErrorResponse]
    :ivar is_attached_compute: Indicating whether the compute was provisioned
     by user and brought from outside if true, or machine learning service
     provisioned it if false.
    :vartype is_attached_compute: bool
    :param disable_local_auth: Opt-out of local authentication and ensure
     customers can use only MSI and AAD exclusively for authentication.
    :type disable_local_auth: bool
    :param compute_type: Required. Constant filled by server.
    :type compute_type: str
    :param properties:
    :type properties:
     ~azure.mgmt.machinelearningservices.models.HDInsightProperties
    """

    _validation = {
        'provisioning_state': {'readonly': True},
        'created_on': {'readonly': True},
        'modified_on': {'readonly': True},
        'provisioning_errors': {'readonly': True},
        'is_attached_compute': {'readonly': True},
        'compute_type': {'required': True},
    }

    _attribute_map = {
        'compute_location': {'key': 'computeLocation', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
        'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
        'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
        'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'HDInsightProperties'},
    }

    def __init__(self, **kwargs):
        super(HDInsight, self).__init__(**kwargs)
        self.properties = kwargs.get('properties', None)
        # Polymorphic discriminator value for this subtype.
        self.compute_type = 'HDInsight'


class HDInsightProperties(Model):
    """HDInsightProperties.

    :param ssh_port: Port open for ssh connections on the master node of the
     cluster.
    :type ssh_port: int
    :param address: Public IP address of the master node of the cluster.
    :type address: str
    :param administrator_account: Admin credentials for master node of the
     cluster
    :type administrator_account:
     ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials
    """

    _attribute_map = {
        'ssh_port': {'key': 'sshPort', 'type': 'int'},
        'address': {'key': 'address', 'type': 'str'},
        'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
    }

    def __init__(self, **kwargs):
        super(HDInsightProperties, self).__init__(**kwargs)
        self.ssh_port = kwargs.get('ssh_port', None)
        self.address = kwargs.get('address', None)
        self.administrator_account = kwargs.get('administrator_account', None)


class IdAssetReference(AssetReferenceBase):
    """Reference to an asset via its ARM resource ID.

    All required parameters must be populated in order to send to Azure.

    :param reference_type: Required. Constant filled by server.
    :type reference_type: str
    :param asset_id: Required. ARM resource ID of the asset.
    :type asset_id: str
    """

    _validation = {
        'reference_type': {'required': True},
        'asset_id': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'reference_type': {'key': 'referenceType', 'type': 'str'},
        'asset_id': {'key': 'assetId', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(IdAssetReference, self).__init__(**kwargs)
        self.asset_id = kwargs.get('asset_id', None)
        # Polymorphic discriminator value for this subtype.
        self.reference_type = 'Id'


class Identity(Model):
    """Identity for the resource.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar principal_id: The principal ID of resource identity.
    :vartype principal_id: str
    :ivar tenant_id: The tenant ID of resource.
    :vartype tenant_id: str
    :param type: The identity type. Possible values include: 'SystemAssigned',
     'SystemAssigned,UserAssigned', 'UserAssigned', 'None'
    :type type: str or
     ~azure.mgmt.machinelearningservices.models.ResourceIdentityType
    :param user_assigned_identities: The user assigned identities associated
     with the resource.
    :type user_assigned_identities: dict[str,
     ~azure.mgmt.machinelearningservices.models.UserAssignedIdentity]
    """

    _validation = {
        'principal_id': {'readonly': True},
        'tenant_id': {'readonly': True},
    }

    _attribute_map = {
        'principal_id': {'key': 'principalId', 'type': 'str'},
        'tenant_id': {'key': 'tenantId', 'type': 'str'},
        'type': {'key': 'type', 'type': 'ResourceIdentityType'},
        'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'},
    }

    def __init__(self, **kwargs):
        super(Identity, self).__init__(**kwargs)
        # Read-only (see _validation); populated by the server on responses.
        self.principal_id = None
        self.tenant_id = None
        self.type = kwargs.get('type', None)
        self.user_assigned_identities = kwargs.get('user_assigned_identities', None)


class IdentityForCmk(Model):
    """Identity that will be used to access key vault for encryption at rest.

    :param user_assigned_identity: The ArmId of the user assigned identity
     that will be used to access the customer managed key vault
    :type user_assigned_identity: str
    """

    _attribute_map = {
        'user_assigned_identity': {'key': 'userAssignedIdentity', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(IdentityForCmk, self).__init__(**kwargs)
        self.user_assigned_identity = kwargs.get('user_assigned_identity', None)


class InferenceContainerProperties(Model):
    """InferenceContainerProperties.

    :param liveness_route: The route to check the liveness of the inference
     server container.
    :type liveness_route: ~azure.mgmt.machinelearningservices.models.Route
    :param readiness_route: The route to check the readiness of the inference
     server container.
    :type readiness_route: ~azure.mgmt.machinelearningservices.models.Route
    :param scoring_route: The port to send the scoring requests to, within the
     inference server container.
    :type scoring_route: ~azure.mgmt.machinelearningservices.models.Route
    """

    _attribute_map = {
        'liveness_route': {'key': 'livenessRoute', 'type': 'Route'},
        'readiness_route': {'key': 'readinessRoute', 'type': 'Route'},
        'scoring_route': {'key': 'scoringRoute', 'type': 'Route'},
    }

    def __init__(self, **kwargs):
        super(InferenceContainerProperties, self).__init__(**kwargs)
        self.liveness_route = kwargs.get('liveness_route', None)
        self.readiness_route = kwargs.get('readiness_route', None)
        self.scoring_route = kwargs.get('scoring_route', None)


class InputDataBinding(Model):
    """InputDataBinding.

    :param data_id: ARM resource ID of the registered dataVersion.
    :type data_id: str
    :param mode: Mechanism for accessing the data artifact. Possible values
     include: 'Mount', 'Download', 'Upload'
    :type mode: str or
     ~azure.mgmt.machinelearningservices.models.DataBindingMode
    :param path_on_compute: Location of data inside the container process.
    :type path_on_compute: str
    """

    _attribute_map = {
        'data_id': {'key': 'dataId', 'type': 'str'},
        'mode': {'key': 'mode', 'type': 'str'},
        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(InputDataBinding, self).__init__(**kwargs)
        self.data_id = kwargs.get('data_id', None)
        self.mode = kwargs.get('mode', None)
        self.path_on_compute = kwargs.get('path_on_compute', None)


class JobBaseResource(Resource):
    """Azure Resource Manager resource envelope.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Fully qualified resource ID for the resource. Ex -
     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
    :vartype id: str
    :ivar name: The name of the resource
    :vartype name: str
    :ivar type: The type of the resource. E.g.
     "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
    :vartype type: str
    :param properties: Required. Additional attributes of the entity.
    :type properties: ~azure.mgmt.machinelearningservices.models.JobBase
    :ivar system_data: System data associated with resource provider
    :vartype system_data:
     ~azure.mgmt.machinelearningservices.models.SystemData
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'properties': {'required': True},
        'system_data': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'JobBase'},
        'system_data': {'key': 'systemData', 'type': 'SystemData'},
    }

    def __init__(self, **kwargs):
        super(JobBaseResource, self).__init__(**kwargs)
        self.properties = kwargs.get('properties', None)
        # Read-only (see _validation); populated by the server on responses.
        self.system_data = None


class JobEndpoint(Model):
    """Job endpoint definition.

    :param endpoint: Url for endpoint.
    :type endpoint: str
    :param job_endpoint_type: Endpoint type.
    :type job_endpoint_type: str
    :param port: Port for endpoint.
    :type port: int
    :param properties: Additional properties to set on the endpoint.
    :type properties: dict[str, str]
    """

    _attribute_map = {
        'endpoint': {'key': 'endpoint', 'type': 'str'},
        'job_endpoint_type': {'key': 'jobEndpointType', 'type': 'str'},
        'port': {'key': 'port', 'type': 'int'},
        'properties': {'key': 'properties', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        super(JobEndpoint, self).__init__(**kwargs)
        self.endpoint = kwargs.get('endpoint', None)
        self.job_endpoint_type = kwargs.get('job_endpoint_type', None)
        self.port = kwargs.get('port', None)
        self.properties = kwargs.get('properties', None)


class JobOutput(Model):
    """Job output definition container information on where to find job
    output/logs.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar datastore_id: ARM ID of the datastore where the job logs and
     artifacts are stored, or null for the default container ("azureml") in the
     workspace's storage account.
    :vartype datastore_id: str
    :ivar path: Path within the datastore to the job logs and artifacts.
    :vartype path: str
    """

    _validation = {
        'datastore_id': {'readonly': True},
        'path': {'readonly': True},
    }

    _attribute_map = {
        'datastore_id': {'key': 'datastoreId', 'type': 'str'},
        'path': {'key': 'path', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(JobOutput, self).__init__(**kwargs)
        # Both fields are read-only (see _validation); populated by the server.
        self.datastore_id = None
        self.path = None


class OnlineDeployment(Model):
    """OnlineDeployment.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: K8sOnlineDeployment, ManagedOnlineDeployment

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :param app_insights_enabled: If true, enables Application Insights
     logging.
    :type app_insights_enabled: bool
    :param code_configuration: Code configuration for the endpoint deployment.
    :type code_configuration:
     ~azure.mgmt.machinelearningservices.models.CodeConfiguration
    :param description: Description of the endpoint deployment.
    :type description: str
    :param environment_id: ARM resource ID of the environment specification
     for the endpoint deployment.
    :type environment_id: str
    :param environment_variables: Environment variables configuration for the
     deployment.
    :type environment_variables: dict[str, str]
    :param liveness_probe: Deployment container liveness/readiness probe
     configuration.
    :type liveness_probe:
     ~azure.mgmt.machinelearningservices.models.ProbeSettings
    :param model: Reference to the model asset for the endpoint deployment.
    :type model: ~azure.mgmt.machinelearningservices.models.AssetReferenceBase
    :param properties: Property dictionary. Properties can be added, but not
     removed or altered.
    :type properties: dict[str, str]
    :ivar provisioning_state: Provisioning state for the endpoint deployment.
     Possible values include: 'Creating', 'Deleting', 'Scaling', 'Updating',
     'Succeeded', 'Failed', 'Canceled'
    :vartype provisioning_state: str or
     ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState
    :param request_settings: Online deployment scoring requests configuration.
    :type request_settings:
     ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings
    :param scale_settings: Online deployment scaling configuration.
    :type scale_settings:
     ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings
    :param endpoint_compute_type: Required. Constant filled by server.
    :type endpoint_compute_type: str
    """

    _validation = {
        'provisioning_state': {'readonly': True},
        'endpoint_compute_type': {'required': True},
    }

    _attribute_map = {
        'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
        'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'},
        'description': {'key': 'description', 'type': 'str'},
        'environment_id': {'key': 'environmentId', 'type': 'str'},
        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
        'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'},
        'model': {'key': 'model', 'type': 'AssetReferenceBase'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'},
        'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'},
        'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'},
    }

    # Maps the 'endpointComputeType' discriminator value received on the wire
    # to the concrete subtype instantiated during deserialization.
    _subtype_map = {
        'endpoint_compute_type': {'K8S': 'K8sOnlineDeployment', 'Managed': 'ManagedOnlineDeployment'}
    }

    def __init__(self, **kwargs):
        super(OnlineDeployment, self).__init__(**kwargs)
        self.app_insights_enabled = kwargs.get('app_insights_enabled', None)
        self.code_configuration = kwargs.get('code_configuration', None)
        self.description = kwargs.get('description', None)
        self.environment_id = kwargs.get('environment_id', None)
        self.environment_variables = kwargs.get('environment_variables', None)
        self.liveness_probe = kwargs.get('liveness_probe', None)
        self.model = kwargs.get('model', None)
        self.properties = kwargs.get('properties', None)
        # Read-only (see _validation); populated by the server on responses.
        self.provisioning_state = None
        self.request_settings = kwargs.get('request_settings', None)
        self.scale_settings = kwargs.get('scale_settings', None)
        # Discriminator; left None on the base class, set by each subtype.
        self.endpoint_compute_type = None


class K8sOnlineDeployment(OnlineDeployment):
    """K8sOnlineDeployment.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :param app_insights_enabled: If true, enables Application Insights
     logging.
    :type app_insights_enabled: bool
    :param code_configuration: Code configuration for the endpoint deployment.
    :type code_configuration:
     ~azure.mgmt.machinelearningservices.models.CodeConfiguration
    :param description: Description of the endpoint deployment.
    :type description: str
    :param environment_id: ARM resource ID of the environment specification
     for the endpoint deployment.
    :type environment_id: str
    :param environment_variables: Environment variables configuration for the
     deployment.
    :type environment_variables: dict[str, str]
    :param liveness_probe: Deployment container liveness/readiness probe
     configuration.
    :type liveness_probe:
     ~azure.mgmt.machinelearningservices.models.ProbeSettings
    :param model: Reference to the model asset for the endpoint deployment.
    :type model: ~azure.mgmt.machinelearningservices.models.AssetReferenceBase
    :param properties: Property dictionary. Properties can be added, but not
     removed or altered.
    :type properties: dict[str, str]
    :ivar provisioning_state: Provisioning state for the endpoint deployment.
     Possible values include: 'Creating', 'Deleting', 'Scaling', 'Updating',
     'Succeeded', 'Failed', 'Canceled'
    :vartype provisioning_state: str or
     ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState
    :param request_settings: Online deployment scoring requests configuration.
    :type request_settings:
     ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings
    :param scale_settings: Online deployment scaling configuration.
    :type scale_settings:
     ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings
    :param endpoint_compute_type: Required. Constant filled by server.
    :type endpoint_compute_type: str
    :param container_resource_requirements: Resource requirements for each
     container instance within an online deployment.
    :type container_resource_requirements:
     ~azure.mgmt.machinelearningservices.models.ContainerResourceRequirements
    """

    _validation = {
        'provisioning_state': {'readonly': True},
        'endpoint_compute_type': {'required': True},
    }

    _attribute_map = {
        'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
        'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'},
        'description': {'key': 'description', 'type': 'str'},
        'environment_id': {'key': 'environmentId', 'type': 'str'},
        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
        'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'},
        'model': {'key': 'model', 'type': 'AssetReferenceBase'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'},
        'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'},
        'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'},
        'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
    }

    def __init__(self, **kwargs):
        super(K8sOnlineDeployment, self).__init__(**kwargs)
        self.container_resource_requirements = kwargs.get('container_resource_requirements', None)
        # Polymorphic discriminator value for this subtype.
        self.endpoint_compute_type = 'K8S'


class KeyVaultProperties(Model):
    """KeyVaultProperties.

    All required parameters must be populated in order to send to Azure.

    :param key_vault_arm_id: Required. The ArmId of the keyVault where the
     customer owned encryption key is present.
    :type key_vault_arm_id: str
    :param key_identifier: Required. Key vault uri to access the encryption
     key.
    :type key_identifier: str
    :param identity_client_id: For future use - The client id of the identity
     which will be used to access key vault.
    :type identity_client_id: str
    """

    _validation = {
        'key_vault_arm_id': {'required': True},
        'key_identifier': {'required': True},
    }

    _attribute_map = {
        'key_vault_arm_id': {'key': 'keyVaultArmId', 'type': 'str'},
        'key_identifier': {'key': 'keyIdentifier', 'type': 'str'},
        'identity_client_id': {'key': 'identityClientId', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(KeyVaultProperties, self).__init__(**kwargs)
        self.key_vault_arm_id = kwargs.get('key_vault_arm_id', None)
        self.key_identifier = kwargs.get('key_identifier', None)
        self.identity_client_id = kwargs.get('identity_client_id', None)


class LabelCategory(Model):
    """Label category definition.

    :param allow_multi_select: Indicates whether it is allowed to select
     multiple classes in this category.
    :type allow_multi_select: bool
    :param classes: Dictionary of label classes in this category.
    :type classes: dict[str,
     ~azure.mgmt.machinelearningservices.models.LabelClass]
    :param display_name: Display name of the label category.
    :type display_name: str
    """

    _attribute_map = {
        'allow_multi_select': {'key': 'allowMultiSelect', 'type': 'bool'},
        'classes': {'key': 'classes', 'type': '{LabelClass}'},
        'display_name': {'key': 'displayName', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(LabelCategory, self).__init__(**kwargs)
        self.allow_multi_select = kwargs.get('allow_multi_select', None)
        self.classes = kwargs.get('classes', None)
        self.display_name = kwargs.get('display_name', None)


class LabelClass(Model):
    """Label class definition.

    :param display_name: Display name of the label class.
    :type display_name: str
    :param subclasses: Dictionary of subclasses of the label class.
    :type subclasses: dict[str,
     ~azure.mgmt.machinelearningservices.models.LabelClass]
    """

    _attribute_map = {
        'display_name': {'key': 'displayName', 'type': 'str'},
        'subclasses': {'key': 'subclasses', 'type': '{LabelClass}'},
    }

    def __init__(self, **kwargs):
        super(LabelClass, self).__init__(**kwargs)
        self.display_name = kwargs.get('display_name', None)
        self.subclasses = kwargs.get('subclasses', None)


class LabelingDatasetConfiguration(Model):
    """Labeling dataset configuration definition.

    :param asset_name: Name of the data asset to perform labeling.
    :type asset_name: str
    :param dataset_version: AML dataset version.
    :type dataset_version: str
    :param incremental_dataset_refresh_enabled: Indicates whether to enable
     incremental dataset refresh.
    :type incremental_dataset_refresh_enabled: bool
    """

    _attribute_map = {
        'asset_name': {'key': 'assetName', 'type': 'str'},
        'dataset_version': {'key': 'datasetVersion', 'type': 'str'},
        'incremental_dataset_refresh_enabled': {'key': 'incrementalDatasetRefreshEnabled', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        super(LabelingDatasetConfiguration, self).__init__(**kwargs)
        self.asset_name = kwargs.get('asset_name', None)
        self.dataset_version = kwargs.get('dataset_version', None)
        self.incremental_dataset_refresh_enabled = kwargs.get('incremental_dataset_refresh_enabled', None)


class LabelingJob(Model):
    """Labeling job definition.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar created_time_utc: Created time of the job in UTC timezone.
    :vartype created_time_utc: datetime
    :param dataset_configuration: Configuration of dataset used in the job.
    :type dataset_configuration:
     ~azure.mgmt.machinelearningservices.models.LabelingDatasetConfiguration
    :param description: The asset description text.
+ :type description: str + :ivar interaction_endpoints: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of + FileStreamObject. + :vartype interaction_endpoints: dict[str, + ~azure.mgmt.machinelearningservices.models.JobEndpoint] + :param job_instructions: Labeling instructions of the job. + :type job_instructions: + ~azure.mgmt.machinelearningservices.models.LabelingJobInstructions + :param job_type: Required. Specifies the type of job. This field should + always be set to "Labeling". Possible values include: 'Command', 'Sweep', + 'Labeling' + :type job_type: str or ~azure.mgmt.machinelearningservices.models.JobType + :param label_categories: Label categories of the job. + :type label_categories: dict[str, + ~azure.mgmt.machinelearningservices.models.LabelCategory] + :param labeling_job_media_properties: Media type specific properties in + the job. + :type labeling_job_media_properties: + ~azure.mgmt.machinelearningservices.models.LabelingJobMediaProperties + :param ml_assist_configuration: Configuration of MLAssist feature in the + job. + :type ml_assist_configuration: + ~azure.mgmt.machinelearningservices.models.MLAssistConfiguration + :ivar progress_metrics: Progress metrics of the job. + :vartype progress_metrics: + ~azure.mgmt.machinelearningservices.models.ProgressMetrics + :ivar project_id: Internal id of the job(Previously called project). + :vartype project_id: str + :param properties: The asset property dictionary. + :type properties: dict[str, str] + :ivar provisioning_state: Specifies the labeling job provisioning state. + Possible values include: 'Succeeded', 'Failed', 'Canceled', 'InProgress' + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.JobProvisioningState + :ivar status: Status of the job. 
Possible values include: 'NotStarted', + 'Starting', 'Provisioning', 'Preparing', 'Queued', 'Running', + 'Finalizing', 'CancelRequested', 'Completed', 'Failed', 'Canceled', + 'NotResponding', 'Paused', 'Unknown' + :vartype status: str or + ~azure.mgmt.machinelearningservices.models.JobStatus + :ivar status_messages: Status messages of the job. + :vartype status_messages: + list[~azure.mgmt.machinelearningservices.models.StatusMessage] + :param tags: Tag dictionary. Tags can be added, removed, and updated. + :type tags: dict[str, str] + """ + + _validation = { + 'created_time_utc': {'readonly': True}, + 'interaction_endpoints': {'readonly': True}, + 'job_type': {'required': True}, + 'progress_metrics': {'readonly': True}, + 'project_id': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'status': {'readonly': True}, + 'status_messages': {'readonly': True}, + } + + _attribute_map = { + 'created_time_utc': {'key': 'createdTimeUtc', 'type': 'iso-8601'}, + 'dataset_configuration': {'key': 'datasetConfiguration', 'type': 'LabelingDatasetConfiguration'}, + 'description': {'key': 'description', 'type': 'str'}, + 'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'}, + 'job_instructions': {'key': 'jobInstructions', 'type': 'LabelingJobInstructions'}, + 'job_type': {'key': 'jobType', 'type': 'str'}, + 'label_categories': {'key': 'labelCategories', 'type': '{LabelCategory}'}, + 'labeling_job_media_properties': {'key': 'labelingJobMediaProperties', 'type': 'LabelingJobMediaProperties'}, + 'ml_assist_configuration': {'key': 'mlAssistConfiguration', 'type': 'MLAssistConfiguration'}, + 'progress_metrics': {'key': 'progressMetrics', 'type': 'ProgressMetrics'}, + 'project_id': {'key': 'projectId', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'status_messages': {'key': 'statusMessages', 'type': 
'[StatusMessage]'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, **kwargs): + super(LabelingJob, self).__init__(**kwargs) + self.created_time_utc = None + self.dataset_configuration = kwargs.get('dataset_configuration', None) + self.description = kwargs.get('description', None) + self.interaction_endpoints = None + self.job_instructions = kwargs.get('job_instructions', None) + self.job_type = kwargs.get('job_type', None) + self.label_categories = kwargs.get('label_categories', None) + self.labeling_job_media_properties = kwargs.get('labeling_job_media_properties', None) + self.ml_assist_configuration = kwargs.get('ml_assist_configuration', None) + self.progress_metrics = None + self.project_id = None + self.properties = kwargs.get('properties', None) + self.provisioning_state = None + self.status = None + self.status_messages = None + self.tags = kwargs.get('tags', None) + + +class LabelingJobMediaProperties(Model): + """Properties of a labeling job. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: LabelingJobImageProperties, LabelingJobTextProperties + + All required parameters must be populated in order to send to Azure. + + :param media_type: Required. Constant filled by server. + :type media_type: str + """ + + _validation = { + 'media_type': {'required': True}, + } + + _attribute_map = { + 'media_type': {'key': 'mediaType', 'type': 'str'}, + } + + _subtype_map = { + 'media_type': {'Image': 'LabelingJobImageProperties', 'Text': 'LabelingJobTextProperties'} + } + + def __init__(self, **kwargs): + super(LabelingJobMediaProperties, self).__init__(**kwargs) + self.media_type = None + + +class LabelingJobImageProperties(LabelingJobMediaProperties): + """Properties of a labeling job for image data. + + All required parameters must be populated in order to send to Azure. + + :param media_type: Required. Constant filled by server. 
+ :type media_type: str + :param annotation_type: Annotation type of image labeling job. Possible + values include: 'Classification', 'BoundingBox', 'InstanceSegmentation' + :type annotation_type: str or + ~azure.mgmt.machinelearningservices.models.ImageAnnotationType + """ + + _validation = { + 'media_type': {'required': True}, + } + + _attribute_map = { + 'media_type': {'key': 'mediaType', 'type': 'str'}, + 'annotation_type': {'key': 'annotationType', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(LabelingJobImageProperties, self).__init__(**kwargs) + self.annotation_type = kwargs.get('annotation_type', None) + self.media_type = 'Image' + + +class LabelingJobInstructions(Model): + """Instructions for labeling job. + + :param uri: The link to a page with detailed labeling instructions for + labelers. + :type uri: str + """ + + _attribute_map = { + 'uri': {'key': 'uri', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(LabelingJobInstructions, self).__init__(**kwargs) + self.uri = kwargs.get('uri', None) + + +class LabelingJobResource(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param properties: Required. Additional attributes of the entity. 
+ :type properties: ~azure.mgmt.machinelearningservices.models.LabelingJob + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'LabelingJob'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, **kwargs): + super(LabelingJobResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + self.system_data = None + + +class LabelingJobTextProperties(LabelingJobMediaProperties): + """Properties of a labeling job for text data. + + All required parameters must be populated in order to send to Azure. + + :param media_type: Required. Constant filled by server. + :type media_type: str + :param annotation_type: Annotation type of text labeling job. Possible + values include: 'Classification' + :type annotation_type: str or + ~azure.mgmt.machinelearningservices.models.TextAnnotationType + """ + + _validation = { + 'media_type': {'required': True}, + } + + _attribute_map = { + 'media_type': {'key': 'mediaType', 'type': 'str'}, + 'annotation_type': {'key': 'annotationType', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(LabelingJobTextProperties, self).__init__(**kwargs) + self.annotation_type = kwargs.get('annotation_type', None) + self.media_type = 'Text' + + +class LinkedInfo(Model): + """Information about a datastore origin, if linked. + + :param linked_id: Linked service ID. + :type linked_id: str + :param linked_resource_name: Linked service resource name. 
+ :type linked_resource_name: str + :param origin: Type of the linked service. Possible values include: + 'Synapse' + :type origin: str or ~azure.mgmt.machinelearningservices.models.OriginType + """ + + _attribute_map = { + 'linked_id': {'key': 'linkedId', 'type': 'str'}, + 'linked_resource_name': {'key': 'linkedResourceName', 'type': 'str'}, + 'origin': {'key': 'origin', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(LinkedInfo, self).__init__(**kwargs) + self.linked_id = kwargs.get('linked_id', None) + self.linked_resource_name = kwargs.get('linked_resource_name', None) + self.origin = kwargs.get('origin', None) + + +class ListNotebookKeysResult(Model): + """ListNotebookKeysResult. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar primary_access_key: + :vartype primary_access_key: str + :ivar secondary_access_key: + :vartype secondary_access_key: str + """ + + _validation = { + 'primary_access_key': {'readonly': True}, + 'secondary_access_key': {'readonly': True}, + } + + _attribute_map = { + 'primary_access_key': {'key': 'primaryAccessKey', 'type': 'str'}, + 'secondary_access_key': {'key': 'secondaryAccessKey', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ListNotebookKeysResult, self).__init__(**kwargs) + self.primary_access_key = None + self.secondary_access_key = None + + +class ListStorageAccountKeysResult(Model): + """ListStorageAccountKeysResult. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar user_storage_key: + :vartype user_storage_key: str + """ + + _validation = { + 'user_storage_key': {'readonly': True}, + } + + _attribute_map = { + 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ListStorageAccountKeysResult, self).__init__(**kwargs) + self.user_storage_key = None + + +class ListWorkspaceKeysResult(Model): + """ListWorkspaceKeysResult. 
+ + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar user_storage_key: + :vartype user_storage_key: str + :ivar user_storage_resource_id: + :vartype user_storage_resource_id: str + :ivar app_insights_instrumentation_key: + :vartype app_insights_instrumentation_key: str + :ivar container_registry_credentials: + :vartype container_registry_credentials: + ~azure.mgmt.machinelearningservices.models.RegistryListCredentialsResult + :ivar notebook_access_keys: + :vartype notebook_access_keys: + ~azure.mgmt.machinelearningservices.models.ListNotebookKeysResult + """ + + _validation = { + 'user_storage_key': {'readonly': True}, + 'user_storage_resource_id': {'readonly': True}, + 'app_insights_instrumentation_key': {'readonly': True}, + 'container_registry_credentials': {'readonly': True}, + 'notebook_access_keys': {'readonly': True}, + } + + _attribute_map = { + 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'}, + 'user_storage_resource_id': {'key': 'userStorageResourceId', 'type': 'str'}, + 'app_insights_instrumentation_key': {'key': 'appInsightsInstrumentationKey', 'type': 'str'}, + 'container_registry_credentials': {'key': 'containerRegistryCredentials', 'type': 'RegistryListCredentialsResult'}, + 'notebook_access_keys': {'key': 'notebookAccessKeys', 'type': 'ListNotebookKeysResult'}, + } + + def __init__(self, **kwargs): + super(ListWorkspaceKeysResult, self).__init__(**kwargs) + self.user_storage_key = None + self.user_storage_resource_id = None + self.app_insights_instrumentation_key = None + self.container_registry_credentials = None + self.notebook_access_keys = None + + +class ManagedIdentity(IdentityConfiguration): + """Managed identity configuration. + + All required parameters must be populated in order to send to Azure. + + :param identity_type: Required. Constant filled by server. + :type identity_type: str + :param client_id: Specifies a user-assigned identity by client ID. 
For + system-assigned, do not set this field. + :type client_id: str + :param object_id: Specifies a user-assigned identity by object ID. For + system-assigned, do not set this field. + :type object_id: str + :param resource_id: Specifies a user-assigned identity by ARM resource ID. + For system-assigned, do not set this field. + :type resource_id: str + """ + + _validation = { + 'identity_type': {'required': True}, + } + + _attribute_map = { + 'identity_type': {'key': 'identityType', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + 'object_id': {'key': 'objectId', 'type': 'str'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ManagedIdentity, self).__init__(**kwargs) + self.client_id = kwargs.get('client_id', None) + self.object_id = kwargs.get('object_id', None) + self.resource_id = kwargs.get('resource_id', None) + self.identity_type = 'Managed' + + +class ManagedOnlineDeployment(OnlineDeployment): + """ManagedOnlineDeployment. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param app_insights_enabled: If true, enables Application Insights + logging. + :type app_insights_enabled: bool + :param code_configuration: Code configuration for the endpoint deployment. + :type code_configuration: + ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :param description: Description of the endpoint deployment. + :type description: str + :param environment_id: ARM resource ID of the environment specification + for the endpoint deployment. + :type environment_id: str + :param environment_variables: Environment variables configuration for the + deployment. + :type environment_variables: dict[str, str] + :param liveness_probe: Deployment container liveness/readiness probe + configuration. 
+ :type liveness_probe: + ~azure.mgmt.machinelearningservices.models.ProbeSettings + :param model: Reference to the model asset for the endpoint deployment. + :type model: ~azure.mgmt.machinelearningservices.models.AssetReferenceBase + :param properties: Property dictionary. Properties can be added, but not + removed or altered. + :type properties: dict[str, str] + :ivar provisioning_state: Provisioning state for the endpoint deployment. + Possible values include: 'Creating', 'Deleting', 'Scaling', 'Updating', + 'Succeeded', 'Failed', 'Canceled' + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState + :param request_settings: Online deployment scoring requests configuration. + :type request_settings: + ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :param scale_settings: Online deployment scaling configuration. + :type scale_settings: + ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + :param endpoint_compute_type: Required. Constant filled by server. + :type endpoint_compute_type: str + :param instance_type: Compute instance type. + :type instance_type: str + :param readiness_probe: Deployment container liveness/readiness probe + configuration. 
+ :type readiness_probe: + ~azure.mgmt.machinelearningservices.models.ProbeSettings + """ + + _validation = { + 'provisioning_state': {'readonly': True}, + 'endpoint_compute_type': {'required': True}, + } + + _attribute_map = { + 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'}, + 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, + 'description': {'key': 'description', 'type': 'str'}, + 'environment_id': {'key': 'environmentId', 'type': 'str'}, + 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, + 'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'}, + 'model': {'key': 'model', 'type': 'AssetReferenceBase'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'}, + 'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'}, + 'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'}, + 'instance_type': {'key': 'instanceType', 'type': 'str'}, + 'readiness_probe': {'key': 'readinessProbe', 'type': 'ProbeSettings'}, + } + + def __init__(self, **kwargs): + super(ManagedOnlineDeployment, self).__init__(**kwargs) + self.instance_type = kwargs.get('instance_type', None) + self.readiness_probe = kwargs.get('readiness_probe', None) + self.endpoint_compute_type = 'Managed' + + +class ManualScaleSettings(OnlineScaleSettings): + """ManualScaleSettings. + + All required parameters must be populated in order to send to Azure. + + :param max_instances: Maximum number of instances for this deployment. + :type max_instances: int + :param min_instances: Minimum number of instances for this deployment. + :type min_instances: int + :param scale_type: Required. Constant filled by server. + :type scale_type: str + :param instance_count: Fixed number of instances for this deployment. 
+ :type instance_count: int + """ + + _validation = { + 'scale_type': {'required': True}, + } + + _attribute_map = { + 'max_instances': {'key': 'maxInstances', 'type': 'int'}, + 'min_instances': {'key': 'minInstances', 'type': 'int'}, + 'scale_type': {'key': 'scaleType', 'type': 'str'}, + 'instance_count': {'key': 'instanceCount', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(ManualScaleSettings, self).__init__(**kwargs) + self.instance_count = kwargs.get('instance_count', None) + self.scale_type = 'Manual' + + +class MedianStoppingPolicy(EarlyTerminationPolicy): + """Defines an early termination policy based on running averages of the + primary metric of all runs. + + All required parameters must be populated in order to send to Azure. + + :param delay_evaluation: Number of intervals by which to delay the first + evaluation. + :type delay_evaluation: int + :param evaluation_interval: Interval (number of runs) between policy + evaluations. + :type evaluation_interval: int + :param policy_type: Required. Constant filled by server. + :type policy_type: str + """ + + _validation = { + 'policy_type': {'required': True}, + } + + _attribute_map = { + 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'}, + 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'}, + 'policy_type': {'key': 'policyType', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(MedianStoppingPolicy, self).__init__(**kwargs) + self.policy_type = 'MedianStopping' + + +class MLAssistConfiguration(Model): + """Labeling MLAssist configuration definition. + + :param inferencing_compute_binding: AML compute binding used in + inferencing. + :type inferencing_compute_binding: + ~azure.mgmt.machinelearningservices.models.ComputeConfiguration + :param ml_assist_enabled: Indicates whether MLAssist feature is enabled. + :type ml_assist_enabled: bool + :param training_compute_binding: AML compute binding used in training. 
+ :type training_compute_binding: + ~azure.mgmt.machinelearningservices.models.ComputeConfiguration + """ + + _attribute_map = { + 'inferencing_compute_binding': {'key': 'inferencingComputeBinding', 'type': 'ComputeConfiguration'}, + 'ml_assist_enabled': {'key': 'mlAssistEnabled', 'type': 'bool'}, + 'training_compute_binding': {'key': 'trainingComputeBinding', 'type': 'ComputeConfiguration'}, + } + + def __init__(self, **kwargs): + super(MLAssistConfiguration, self).__init__(**kwargs) + self.inferencing_compute_binding = kwargs.get('inferencing_compute_binding', None) + self.ml_assist_enabled = kwargs.get('ml_assist_enabled', None) + self.training_compute_binding = kwargs.get('training_compute_binding', None) + + +class ModelContainer(Model): + """ModelContainer. + + :param description: The asset description text. + :type description: str + :param properties: The asset property dictionary. + :type properties: dict[str, str] + :param tags: Tag dictionary. Tags can be added, removed, and updated. + :type tags: dict[str, str] + """ + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, **kwargs): + super(ModelContainer, self).__init__(**kwargs) + self.description = kwargs.get('description', None) + self.properties = kwargs.get('properties', None) + self.tags = kwargs.get('tags', None) + + +class ModelContainerResource(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. 
Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param properties: Required. Additional attributes of the entity. + :type properties: + ~azure.mgmt.machinelearningservices.models.ModelContainer + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'ModelContainer'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, **kwargs): + super(ModelContainerResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + self.system_data = None + + +class ModelVersion(Model): + """Model asset version details. + + All required parameters must be populated in order to send to Azure. + + :param datastore_id: ARM resource ID of the datastore where the asset is + located. + :type datastore_id: str + :param description: The asset description text. + :type description: str + :param flavors: Mapping of model flavors to their properties. + :type flavors: dict[str, + ~azure.mgmt.machinelearningservices.models.FlavorData] + :param is_anonymous: If the name version are system generated (anonymous + registration). + :type is_anonymous: bool + :param path: Required. The path of the file/directory in the datastore. 
+ :type path: str + :param properties: The asset property dictionary. + :type properties: dict[str, str] + :param tags: Tag dictionary. Tags can be added, removed, and updated. + :type tags: dict[str, str] + """ + + _validation = { + 'path': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'datastore_id': {'key': 'datastoreId', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'flavors': {'key': 'flavors', 'type': '{FlavorData}'}, + 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, + 'path': {'key': 'path', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, **kwargs): + super(ModelVersion, self).__init__(**kwargs) + self.datastore_id = kwargs.get('datastore_id', None) + self.description = kwargs.get('description', None) + self.flavors = kwargs.get('flavors', None) + self.is_anonymous = kwargs.get('is_anonymous', None) + self.path = kwargs.get('path', None) + self.properties = kwargs.get('properties', None) + self.tags = kwargs.get('tags', None) + + +class ModelVersionResource(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param properties: Required. Additional attributes of the entity. 
+ :type properties: ~azure.mgmt.machinelearningservices.models.ModelVersion + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'ModelVersion'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, **kwargs): + super(ModelVersionResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + self.system_data = None + + +class Mpi(DistributionConfiguration): + """MPI distribution configuration. + + All required parameters must be populated in order to send to Azure. + + :param distribution_type: Required. Constant filled by server. + :type distribution_type: str + :param process_count_per_instance: Number of processes per MPI node. + :type process_count_per_instance: int + """ + + _validation = { + 'distribution_type': {'required': True}, + } + + _attribute_map = { + 'distribution_type': {'key': 'distributionType', 'type': 'str'}, + 'process_count_per_instance': {'key': 'processCountPerInstance', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(Mpi, self).__init__(**kwargs) + self.process_count_per_instance = kwargs.get('process_count_per_instance', None) + self.distribution_type = 'Mpi' + + +class NodeStateCounts(Model): + """Counts of various compute node states on the amlCompute. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar idle_node_count: Idle node count. Number of compute nodes in idle + state. 
+ :vartype idle_node_count: int + :ivar running_node_count: Running node count. Number of compute nodes + which are running jobs. + :vartype running_node_count: int + :ivar preparing_node_count: Preparing node count. Number of compute nodes + which are being prepared. + :vartype preparing_node_count: int + :ivar unusable_node_count: Unusable node count. Number of compute nodes + which are in unusable state. + :vartype unusable_node_count: int + :ivar leaving_node_count: Leaving node count. Number of compute nodes + which are leaving the amlCompute. + :vartype leaving_node_count: int + :ivar preempted_node_count: Preempted node count. Number of compute nodes + which are in preempted state. + :vartype preempted_node_count: int + """ + + _validation = { + 'idle_node_count': {'readonly': True}, + 'running_node_count': {'readonly': True}, + 'preparing_node_count': {'readonly': True}, + 'unusable_node_count': {'readonly': True}, + 'leaving_node_count': {'readonly': True}, + 'preempted_node_count': {'readonly': True}, + } + + _attribute_map = { + 'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'}, + 'running_node_count': {'key': 'runningNodeCount', 'type': 'int'}, + 'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'}, + 'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'}, + 'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'}, + 'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(NodeStateCounts, self).__init__(**kwargs) + self.idle_node_count = None + self.running_node_count = None + self.preparing_node_count = None + self.unusable_node_count = None + self.leaving_node_count = None + self.preempted_node_count = None + + +class NoneDatastoreCredentials(DatastoreCredentials): + """Empty/none datastore credentials. + + All required parameters must be populated in order to send to Azure. + + :param credentials_type: Required. Constant filled by server. 
+ :type credentials_type: str + :param secrets: Empty/none datastore secret. + :type secrets: + ~azure.mgmt.machinelearningservices.models.NoneDatastoreSecrets + """ + + _validation = { + 'credentials_type': {'required': True}, + } + + _attribute_map = { + 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, + 'secrets': {'key': 'secrets', 'type': 'NoneDatastoreSecrets'}, + } + + def __init__(self, **kwargs): + super(NoneDatastoreCredentials, self).__init__(**kwargs) + self.secrets = kwargs.get('secrets', None) + self.credentials_type = 'None' + + +class NoneDatastoreSecrets(DatastoreSecrets): + """Empty/none datastore secret. + + All required parameters must be populated in order to send to Azure. + + :param secrets_type: Required. Constant filled by server. + :type secrets_type: str + """ + + _validation = { + 'secrets_type': {'required': True}, + } + + _attribute_map = { + 'secrets_type': {'key': 'secretsType', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(NoneDatastoreSecrets, self).__init__(**kwargs) + self.secrets_type = 'None' + + +class NotebookAccessTokenResult(Model): + """NotebookAccessTokenResult. + + Variables are only populated by the server, and will be ignored when + sending a request. 
+ + :ivar notebook_resource_id: + :vartype notebook_resource_id: str + :ivar host_name: + :vartype host_name: str + :ivar public_dns: + :vartype public_dns: str + :ivar access_token: + :vartype access_token: str + :ivar token_type: + :vartype token_type: str + :ivar expires_in: + :vartype expires_in: int + :ivar refresh_token: + :vartype refresh_token: str + :ivar scope: + :vartype scope: str + """ + + _validation = { + 'notebook_resource_id': {'readonly': True}, + 'host_name': {'readonly': True}, + 'public_dns': {'readonly': True}, + 'access_token': {'readonly': True}, + 'token_type': {'readonly': True}, + 'expires_in': {'readonly': True}, + 'refresh_token': {'readonly': True}, + 'scope': {'readonly': True}, + } + + _attribute_map = { + 'notebook_resource_id': {'key': 'notebookResourceId', 'type': 'str'}, + 'host_name': {'key': 'hostName', 'type': 'str'}, + 'public_dns': {'key': 'publicDns', 'type': 'str'}, + 'access_token': {'key': 'accessToken', 'type': 'str'}, + 'token_type': {'key': 'tokenType', 'type': 'str'}, + 'expires_in': {'key': 'expiresIn', 'type': 'int'}, + 'refresh_token': {'key': 'refreshToken', 'type': 'str'}, + 'scope': {'key': 'scope', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(NotebookAccessTokenResult, self).__init__(**kwargs) + self.notebook_resource_id = None + self.host_name = None + self.public_dns = None + self.access_token = None + self.token_type = None + self.expires_in = None + self.refresh_token = None + self.scope = None + + +class NotebookPreparationError(Model): + """NotebookPreparationError. 
+ + :param error_message: + :type error_message: str + :param status_code: + :type status_code: int + """ + + _attribute_map = { + 'error_message': {'key': 'errorMessage', 'type': 'str'}, + 'status_code': {'key': 'statusCode', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(NotebookPreparationError, self).__init__(**kwargs) + self.error_message = kwargs.get('error_message', None) + self.status_code = kwargs.get('status_code', None) + + +class NotebookResourceInfo(Model): + """NotebookResourceInfo. + + :param fqdn: + :type fqdn: str + :param resource_id: the data plane resourceId that used to initialize + notebook component + :type resource_id: str + :param notebook_preparation_error: The error that occurs when preparing + notebook. + :type notebook_preparation_error: + ~azure.mgmt.machinelearningservices.models.NotebookPreparationError + """ + + _attribute_map = { + 'fqdn': {'key': 'fqdn', 'type': 'str'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'notebook_preparation_error': {'key': 'notebookPreparationError', 'type': 'NotebookPreparationError'}, + } + + def __init__(self, **kwargs): + super(NotebookResourceInfo, self).__init__(**kwargs) + self.fqdn = kwargs.get('fqdn', None) + self.resource_id = kwargs.get('resource_id', None) + self.notebook_preparation_error = kwargs.get('notebook_preparation_error', None) + + +class Objective(Model): + """Optimization objective. + + All required parameters must be populated in order to send to Azure. + + :param goal: Required. Defines supported metric goals for hyperparameter + tuning. Possible values include: 'Minimize', 'Maximize' + :type goal: str or ~azure.mgmt.machinelearningservices.models.Goal + :param primary_metric: Required. Name of the metric to optimize. 
+ :type primary_metric: str + """ + + _validation = { + 'goal': {'required': True}, + 'primary_metric': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'goal': {'key': 'goal', 'type': 'str'}, + 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(Objective, self).__init__(**kwargs) + self.goal = kwargs.get('goal', None) + self.primary_metric = kwargs.get('primary_metric', None) + + +class OnlineDeploymentTrackedResource(TrackedResource): + """OnlineDeploymentTrackedResource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param tags: Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives + :type location: str + :param identity: Service identity associated with a resource. + :type identity: + ~azure.mgmt.machinelearningservices.models.ResourceIdentity + :param kind: Metadata used by portal/tooling/etc to render different UX + experiences for resources of the same type. + :type kind: str + :param properties: Required. Additional attributes of the entity. 
+ :type properties: + ~azure.mgmt.machinelearningservices.models.OnlineDeployment + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'OnlineDeployment'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, **kwargs): + super(OnlineDeploymentTrackedResource, self).__init__(**kwargs) + self.identity = kwargs.get('identity', None) + self.kind = kwargs.get('kind', None) + self.properties = kwargs.get('properties', None) + self.system_data = None + + +class OnlineEndpoint(Model): + """Online endpoint configuration. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param auth_mode: Required. Inference endpoint authentication mode type. + Possible values include: 'AMLToken', 'Key', 'AADToken' + :type auth_mode: str or + ~azure.mgmt.machinelearningservices.models.EndpointAuthMode + :param description: Description of the inference endpoint. + :type description: str + :param keys: EndpointAuthKeys to set initially on an Endpoint. + This property will always be returned as null. AuthKey values must be + retrieved using the ListKeys API. 
+ :type keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :param properties: Property dictionary. Properties can be added, but not + removed or altered. + :type properties: dict[str, str] + :ivar provisioning_state: State of endpoint provisioning. Possible values + include: 'Creating', 'Deleting', 'Succeeded', 'Failed', 'Updating', + 'Canceled' + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.EndpointProvisioningState + :ivar scoring_uri: Endpoint URI. + :vartype scoring_uri: str + :ivar swagger_uri: Endpoint Swagger URI. + :vartype swagger_uri: str + :param target: ARM resource ID of the compute if it exists. + optional + :type target: str + :param traffic: Traffic rules on how the traffic will be routed across + deployments. + :type traffic: dict[str, int] + """ + + _validation = { + 'auth_mode': {'required': True}, + 'provisioning_state': {'readonly': True}, + 'scoring_uri': {'readonly': True}, + 'swagger_uri': {'readonly': True}, + } + + _attribute_map = { + 'auth_mode': {'key': 'authMode', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'keys': {'key': 'keys', 'type': 'EndpointAuthKeys'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'scoring_uri': {'key': 'scoringUri', 'type': 'str'}, + 'swagger_uri': {'key': 'swaggerUri', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'traffic': {'key': 'traffic', 'type': '{int}'}, + } + + def __init__(self, **kwargs): + super(OnlineEndpoint, self).__init__(**kwargs) + self.auth_mode = kwargs.get('auth_mode', None) + self.description = kwargs.get('description', None) + self.keys = kwargs.get('keys', None) + self.properties = kwargs.get('properties', None) + self.provisioning_state = None + self.scoring_uri = None + self.swagger_uri = None + self.target = kwargs.get('target', None) + self.traffic = kwargs.get('traffic', None) + + +class 
OnlineEndpointTrackedResource(TrackedResource): + """OnlineEndpointTrackedResource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param tags: Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives + :type location: str + :param identity: Service identity associated with a resource. + :type identity: + ~azure.mgmt.machinelearningservices.models.ResourceIdentity + :param kind: Metadata used by portal/tooling/etc to render different UX + experiences for resources of the same type. + :type kind: str + :param properties: Required. Additional attributes of the entity. 
+ :type properties: + ~azure.mgmt.machinelearningservices.models.OnlineEndpoint + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'OnlineEndpoint'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, **kwargs): + super(OnlineEndpointTrackedResource, self).__init__(**kwargs) + self.identity = kwargs.get('identity', None) + self.kind = kwargs.get('kind', None) + self.properties = kwargs.get('properties', None) + self.system_data = None + + +class OnlineRequestSettings(Model): + """Online deployment scoring requests configuration. + + :param max_concurrent_requests_per_instance: The number of requests + allowed to queue at once for this deployment. + :type max_concurrent_requests_per_instance: int + :param max_queue_wait: The maximum queue wait time in ISO 8601 format. + Supports millisecond precision. + :type max_queue_wait: timedelta + :param request_timeout: The request timeout in ISO 8601 format. Supports + millisecond precision. 
+ :type request_timeout: timedelta + """ + + _attribute_map = { + 'max_concurrent_requests_per_instance': {'key': 'maxConcurrentRequestsPerInstance', 'type': 'int'}, + 'max_queue_wait': {'key': 'maxQueueWait', 'type': 'duration'}, + 'request_timeout': {'key': 'requestTimeout', 'type': 'duration'}, + } + + def __init__(self, **kwargs): + super(OnlineRequestSettings, self).__init__(**kwargs) + self.max_concurrent_requests_per_instance = kwargs.get('max_concurrent_requests_per_instance', None) + self.max_queue_wait = kwargs.get('max_queue_wait', None) + self.request_timeout = kwargs.get('request_timeout', None) + + +class Operation(Model): + """Azure Machine Learning workspace REST API operation. + + :param name: Operation name: {provider}/{resource}/{operation} + :type name: str + :param display: Display name of operation + :type display: ~azure.mgmt.machinelearningservices.models.OperationDisplay + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display': {'key': 'display', 'type': 'OperationDisplay'}, + } + + def __init__(self, **kwargs): + super(Operation, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.display = kwargs.get('display', None) + + +class OperationDisplay(Model): + """Display name of operation. + + :param provider: The resource provider name: + Microsoft.MachineLearningExperimentation + :type provider: str + :param resource: The resource on which the operation is performed. + :type resource: str + :param operation: The operation that users can perform. + :type operation: str + :param description: The description for the operation. 
+ :type description: str + """ + + _attribute_map = { + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(OperationDisplay, self).__init__(**kwargs) + self.provider = kwargs.get('provider', None) + self.resource = kwargs.get('resource', None) + self.operation = kwargs.get('operation', None) + self.description = kwargs.get('description', None) + + +class OutputDataBinding(Model): + """OutputDataBinding. + + :param datastore_id: ARM resource ID of the datastore where the data + output will be stored. + :type datastore_id: str + :param mode: Mechanism for data movement to datastore. Possible values + include: 'Mount', 'Download', 'Upload' + :type mode: str or + ~azure.mgmt.machinelearningservices.models.DataBindingMode + :param path_on_compute: Location of data inside the container process. + :type path_on_compute: str + :param path_on_datastore: Path within the datastore to the data. + :type path_on_datastore: str + """ + + _attribute_map = { + 'datastore_id': {'key': 'datastoreId', 'type': 'str'}, + 'mode': {'key': 'mode', 'type': 'str'}, + 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, + 'path_on_datastore': {'key': 'pathOnDatastore', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(OutputDataBinding, self).__init__(**kwargs) + self.datastore_id = kwargs.get('datastore_id', None) + self.mode = kwargs.get('mode', None) + self.path_on_compute = kwargs.get('path_on_compute', None) + self.path_on_datastore = kwargs.get('path_on_datastore', None) + + +class OutputPathAssetReference(AssetReferenceBase): + """Reference to an asset via its path in a job output. + + All required parameters must be populated in order to send to Azure. + + :param reference_type: Required. Constant filled by server. 
+ :type reference_type: str + :param job_id: ARM resource ID of the job. + :type job_id: str + :param path: The path of the file/directory in the job output. + :type path: str + """ + + _validation = { + 'reference_type': {'required': True}, + } + + _attribute_map = { + 'reference_type': {'key': 'referenceType', 'type': 'str'}, + 'job_id': {'key': 'jobId', 'type': 'str'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(OutputPathAssetReference, self).__init__(**kwargs) + self.job_id = kwargs.get('job_id', None) + self.path = kwargs.get('path', None) + self.reference_type = 'OutputPath' + + +class PartialOnlineDeployment(Model): + """Mutable online deployment configuration. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: PartialAksOnlineDeployment, PartialManagedOnlineDeployment + + All required parameters must be populated in order to send to Azure. + + :param app_insights_enabled: Whether AppInsights telemetry is enabled for + this online deployment. + :type app_insights_enabled: bool + :param liveness_probe: Deployment container liveness/readiness probe + configuration. + :type liveness_probe: + ~azure.mgmt.machinelearningservices.models.ProbeSettings + :param request_settings: Online deployment scoring requests configuration. + :type request_settings: + ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :param scale_settings: Online deployment scaling configuration. + :type scale_settings: + ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + :param endpoint_compute_type: Required. Constant filled by server. 
+ :type endpoint_compute_type: str + """ + + _validation = { + 'endpoint_compute_type': {'required': True}, + } + + _attribute_map = { + 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'}, + 'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'}, + 'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'}, + 'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'}, + 'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'}, + } + + _subtype_map = { + 'endpoint_compute_type': {'K8S': 'PartialAksOnlineDeployment', 'Managed': 'PartialManagedOnlineDeployment'} + } + + def __init__(self, **kwargs): + super(PartialOnlineDeployment, self).__init__(**kwargs) + self.app_insights_enabled = kwargs.get('app_insights_enabled', None) + self.liveness_probe = kwargs.get('liveness_probe', None) + self.request_settings = kwargs.get('request_settings', None) + self.scale_settings = kwargs.get('scale_settings', None) + self.endpoint_compute_type = None + + +class PartialAksOnlineDeployment(PartialOnlineDeployment): + """PartialAksOnlineDeployment. + + All required parameters must be populated in order to send to Azure. + + :param app_insights_enabled: Whether AppInsights telemetry is enabled for + this online deployment. + :type app_insights_enabled: bool + :param liveness_probe: Deployment container liveness/readiness probe + configuration. + :type liveness_probe: + ~azure.mgmt.machinelearningservices.models.ProbeSettings + :param request_settings: Online deployment scoring requests configuration. + :type request_settings: + ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :param scale_settings: Online deployment scaling configuration. + :type scale_settings: + ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + :param endpoint_compute_type: Required. Constant filled by server. 
+ :type endpoint_compute_type: str + :param container_resource_requirements: Resource requirements for each + container instance within an online deployment. + :type container_resource_requirements: + ~azure.mgmt.machinelearningservices.models.ContainerResourceRequirements + """ + + _validation = { + 'endpoint_compute_type': {'required': True}, + } + + _attribute_map = { + 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'}, + 'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'}, + 'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'}, + 'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'}, + 'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'}, + 'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'}, + } + + def __init__(self, **kwargs): + super(PartialAksOnlineDeployment, self).__init__(**kwargs) + self.container_resource_requirements = kwargs.get('container_resource_requirements', None) + self.endpoint_compute_type = 'K8S' + + +class PartialBatchDeployment(Model): + """Mutable batch inference settings per deployment. + + :param description: Description of the endpoint deployment. + :type description: str + """ + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(PartialBatchDeployment, self).__init__(**kwargs) + self.description = kwargs.get('description', None) + + +class PartialBatchDeploymentPartialTrackedResource(Model): + """Strictly used in update requests. + + :param identity: Service identity associated with a resource. + :type identity: + ~azure.mgmt.machinelearningservices.models.ResourceIdentity + :param kind: Metadata used by portal/tooling/etc to render different UX + experiences for resources of the same type. + :type kind: str + :param location: The geo-location where the resource lives. 
+ :type location: str + :param properties: Additional attributes of the entity. + :type properties: + ~azure.mgmt.machinelearningservices.models.PartialBatchDeployment + :param tags: Resource tags. + :type tags: dict[str, str] + """ + + _attribute_map = { + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'PartialBatchDeployment'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, **kwargs): + super(PartialBatchDeploymentPartialTrackedResource, self).__init__(**kwargs) + self.identity = kwargs.get('identity', None) + self.kind = kwargs.get('kind', None) + self.location = kwargs.get('location', None) + self.properties = kwargs.get('properties', None) + self.tags = kwargs.get('tags', None) + + +class PartialBatchEndpoint(Model): + """Mutable Batch endpoint configuration. + + :param traffic: Traffic rules on how the traffic will be routed across + deployments. + :type traffic: dict[str, int] + """ + + _attribute_map = { + 'traffic': {'key': 'traffic', 'type': '{int}'}, + } + + def __init__(self, **kwargs): + super(PartialBatchEndpoint, self).__init__(**kwargs) + self.traffic = kwargs.get('traffic', None) + + +class PartialBatchEndpointPartialTrackedResource(Model): + """Strictly used in update requests. + + :param identity: Service identity associated with a resource. + :type identity: + ~azure.mgmt.machinelearningservices.models.ResourceIdentity + :param kind: Metadata used by portal/tooling/etc to render different UX + experiences for resources of the same type. + :type kind: str + :param location: The geo-location where the resource lives. + :type location: str + :param properties: Additional attributes of the entity. + :type properties: + ~azure.mgmt.machinelearningservices.models.PartialBatchEndpoint + :param tags: Resource tags. 
+ :type tags: dict[str, str] + """ + + _attribute_map = { + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'PartialBatchEndpoint'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, **kwargs): + super(PartialBatchEndpointPartialTrackedResource, self).__init__(**kwargs) + self.identity = kwargs.get('identity', None) + self.kind = kwargs.get('kind', None) + self.location = kwargs.get('location', None) + self.properties = kwargs.get('properties', None) + self.tags = kwargs.get('tags', None) + + +class PartialManagedOnlineDeployment(PartialOnlineDeployment): + """PartialManagedOnlineDeployment. + + All required parameters must be populated in order to send to Azure. + + :param app_insights_enabled: Whether AppInsights telemetry is enabled for + this online deployment. + :type app_insights_enabled: bool + :param liveness_probe: Deployment container liveness/readiness probe + configuration. + :type liveness_probe: + ~azure.mgmt.machinelearningservices.models.ProbeSettings + :param request_settings: Online deployment scoring requests configuration. + :type request_settings: + ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :param scale_settings: Online deployment scaling configuration. + :type scale_settings: + ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + :param endpoint_compute_type: Required. Constant filled by server. + :type endpoint_compute_type: str + :param readiness_probe: Deployment container liveness/readiness probe + configuration. 
+ :type readiness_probe: + ~azure.mgmt.machinelearningservices.models.ProbeSettings + """ + + _validation = { + 'endpoint_compute_type': {'required': True}, + } + + _attribute_map = { + 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'}, + 'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'}, + 'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'}, + 'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'}, + 'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'}, + 'readiness_probe': {'key': 'readinessProbe', 'type': 'ProbeSettings'}, + } + + def __init__(self, **kwargs): + super(PartialManagedOnlineDeployment, self).__init__(**kwargs) + self.readiness_probe = kwargs.get('readiness_probe', None) + self.endpoint_compute_type = 'Managed' + + +class PartialOnlineDeploymentPartialTrackedResource(Model): + """Strictly used in update requests. + + :param identity: Service identity associated with a resource. + :type identity: + ~azure.mgmt.machinelearningservices.models.ResourceIdentity + :param kind: Metadata used by portal/tooling/etc to render different UX + experiences for resources of the same type. + :type kind: str + :param location: The geo-location where the resource lives. + :type location: str + :param properties: Additional attributes of the entity. + :type properties: + ~azure.mgmt.machinelearningservices.models.PartialOnlineDeployment + :param tags: Resource tags. 
+ :type tags: dict[str, str] + """ + + _attribute_map = { + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'PartialOnlineDeployment'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, **kwargs): + super(PartialOnlineDeploymentPartialTrackedResource, self).__init__(**kwargs) + self.identity = kwargs.get('identity', None) + self.kind = kwargs.get('kind', None) + self.location = kwargs.get('location', None) + self.properties = kwargs.get('properties', None) + self.tags = kwargs.get('tags', None) + + +class PartialOnlineEndpoint(Model): + """Mutable online endpoint configuration. + + :param traffic: Traffic rules on how the traffic will be routed across + deployments. + :type traffic: dict[str, int] + """ + + _attribute_map = { + 'traffic': {'key': 'traffic', 'type': '{int}'}, + } + + def __init__(self, **kwargs): + super(PartialOnlineEndpoint, self).__init__(**kwargs) + self.traffic = kwargs.get('traffic', None) + + +class PartialOnlineEndpointPartialTrackedResource(Model): + """Strictly used in update requests. + + :param identity: Service identity associated with a resource. + :type identity: + ~azure.mgmt.machinelearningservices.models.ResourceIdentity + :param kind: Metadata used by portal/tooling/etc to render different UX + experiences for resources of the same type. + :type kind: str + :param location: The geo-location where the resource lives. + :type location: str + :param properties: Additional attributes of the entity. + :type properties: + ~azure.mgmt.machinelearningservices.models.PartialOnlineEndpoint + :param tags: Resource tags. 
+ :type tags: dict[str, str] + """ + + _attribute_map = { + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'PartialOnlineEndpoint'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, **kwargs): + super(PartialOnlineEndpointPartialTrackedResource, self).__init__(**kwargs) + self.identity = kwargs.get('identity', None) + self.kind = kwargs.get('kind', None) + self.location = kwargs.get('location', None) + self.properties = kwargs.get('properties', None) + self.tags = kwargs.get('tags', None) + + +class Password(Model): + """Password. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar name: + :vartype name: str + :ivar value: + :vartype value: str + """ + + _validation = { + 'name': {'readonly': True}, + 'value': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(Password, self).__init__(**kwargs) + self.name = None + self.value = None + + +class PersonalComputeInstanceSettings(Model): + """Settings for a personal compute instance. + + :param assigned_user: Assigned User. A user explicitly assigned to a + personal compute instance. + :type assigned_user: + ~azure.mgmt.machinelearningservices.models.AssignedUser + """ + + _attribute_map = { + 'assigned_user': {'key': 'assignedUser', 'type': 'AssignedUser'}, + } + + def __init__(self, **kwargs): + super(PersonalComputeInstanceSettings, self).__init__(**kwargs) + self.assigned_user = kwargs.get('assigned_user', None) + + +class PrivateEndpoint(Model): + """The Private Endpoint resource. + + Variables are only populated by the server, and will be ignored when + sending a request. 
+ + :ivar id: The ARM identifier for Private Endpoint + :vartype id: str + :ivar subnet_arm_id: The ARM identifier for Subnet resource that private + endpoint links to + :vartype subnet_arm_id: str + """ + + _validation = { + 'id': {'readonly': True}, + 'subnet_arm_id': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'subnet_arm_id': {'key': 'subnetArmId', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(PrivateEndpoint, self).__init__(**kwargs) + self.id = None + self.subnet_arm_id = None + + +class PrivateEndpointConnection(Resource): + """The Private Endpoint Connection resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param private_endpoint: The resource of private end point. + :type private_endpoint: + ~azure.mgmt.machinelearningservices.models.PrivateEndpoint + :param private_link_service_connection_state: Required. A collection of + information about the state of the connection between service consumer and + provider. + :type private_link_service_connection_state: + ~azure.mgmt.machinelearningservices.models.PrivateLinkServiceConnectionState + :param provisioning_state: The provisioning state of the private endpoint + connection resource. 
Possible values include: 'Succeeded', 'Creating', + 'Deleting', 'Failed' + :type provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnectionProvisioningState + :param identity: The identity of the resource. + :type identity: ~azure.mgmt.machinelearningservices.models.Identity + :param location: Specifies the location of the resource. + :type location: str + :param tags: Contains resource tags defined as key/value pairs. + :type tags: dict[str, str] + :param sku: The sku of the workspace. + :type sku: ~azure.mgmt.machinelearningservices.models.Sku + :param system_data: + :type system_data: ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'private_link_service_connection_state': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'}, + 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, **kwargs): + super(PrivateEndpointConnection, self).__init__(**kwargs) + self.private_endpoint = kwargs.get('private_endpoint', None) + self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None) + self.provisioning_state = kwargs.get('provisioning_state', None) + self.identity = kwargs.get('identity', None) + self.location = kwargs.get('location', 
None) + self.tags = kwargs.get('tags', None) + self.sku = kwargs.get('sku', None) + self.system_data = kwargs.get('system_data', None) + + +class PrivateLinkResource(Resource): + """A private link resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :ivar group_id: The private link resource group id. + :vartype group_id: str + :ivar required_members: The private link resource required member names. + :vartype required_members: list[str] + :param required_zone_names: The private link resource Private link DNS + zone name. + :type required_zone_names: list[str] + :param identity: The identity of the resource. + :type identity: ~azure.mgmt.machinelearningservices.models.Identity + :param location: Specifies the location of the resource. + :type location: str + :param tags: Contains resource tags defined as key/value pairs. + :type tags: dict[str, str] + :param sku: The sku of the workspace. 
+ :type sku: ~azure.mgmt.machinelearningservices.models.Sku + :param system_data: + :type system_data: ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'group_id': {'readonly': True}, + 'required_members': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'group_id': {'key': 'properties.groupId', 'type': 'str'}, + 'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'}, + 'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, **kwargs): + super(PrivateLinkResource, self).__init__(**kwargs) + self.group_id = None + self.required_members = None + self.required_zone_names = kwargs.get('required_zone_names', None) + self.identity = kwargs.get('identity', None) + self.location = kwargs.get('location', None) + self.tags = kwargs.get('tags', None) + self.sku = kwargs.get('sku', None) + self.system_data = kwargs.get('system_data', None) + + +class PrivateLinkResourceListResult(Model): + """A list of private link resources. 
+ + :param value: Array of private link resources + :type value: + list[~azure.mgmt.machinelearningservices.models.PrivateLinkResource] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateLinkResource]'}, + } + + def __init__(self, **kwargs): + super(PrivateLinkResourceListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + + +class PrivateLinkServiceConnectionState(Model): + """A collection of information about the state of the connection between + service consumer and provider. + + :param status: Indicates whether the connection has been + Approved/Rejected/Removed by the owner of the service. Possible values + include: 'Pending', 'Approved', 'Rejected', 'Disconnected', 'Timeout' + :type status: str or + ~azure.mgmt.machinelearningservices.models.PrivateEndpointServiceConnectionStatus + :param description: The reason for approval/rejection of the connection. + :type description: str + :param actions_required: A message indicating if changes on the service + provider require any updates on the consumer. + :type actions_required: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(PrivateLinkServiceConnectionState, self).__init__(**kwargs) + self.status = kwargs.get('status', None) + self.description = kwargs.get('description', None) + self.actions_required = kwargs.get('actions_required', None) + + +class ProbeSettings(Model): + """Deployment container liveness/readiness probe configuration. + + :param failure_threshold: The number of failures to allow before returning + an unhealthy status. + :type failure_threshold: int + :param initial_delay: The delay before the first probe in ISO 8601 format. + :type initial_delay: timedelta + :param period: The length of time between probes in ISO 8601 format. 
+ :type period: timedelta + :param success_threshold: The number of successful probes before returning + a healthy status. + :type success_threshold: int + :param timeout: The probe timeout in ISO 8601 format. + :type timeout: timedelta + """ + + _attribute_map = { + 'failure_threshold': {'key': 'failureThreshold', 'type': 'int'}, + 'initial_delay': {'key': 'initialDelay', 'type': 'duration'}, + 'period': {'key': 'period', 'type': 'duration'}, + 'success_threshold': {'key': 'successThreshold', 'type': 'int'}, + 'timeout': {'key': 'timeout', 'type': 'duration'}, + } + + def __init__(self, **kwargs): + super(ProbeSettings, self).__init__(**kwargs) + self.failure_threshold = kwargs.get('failure_threshold', None) + self.initial_delay = kwargs.get('initial_delay', None) + self.period = kwargs.get('period', None) + self.success_threshold = kwargs.get('success_threshold', None) + self.timeout = kwargs.get('timeout', None) + + +class ProgressMetrics(Model): + """Progress metrics definition. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar completed_datapoint_count: The completed datapoint count. + :vartype completed_datapoint_count: long + :ivar incremental_dataset_last_refresh_time: The time of last successful + incremental dataset refresh in UTC. + :vartype incremental_dataset_last_refresh_time: datetime + :ivar skipped_datapoint_count: The skipped datapoint count. + :vartype skipped_datapoint_count: long + :ivar total_datapoint_count: The total datapoint count. 
+    :vartype total_datapoint_count: long
+    """
+
+    _validation = {
+        'completed_datapoint_count': {'readonly': True},
+        'incremental_dataset_last_refresh_time': {'readonly': True},
+        'skipped_datapoint_count': {'readonly': True},
+        'total_datapoint_count': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'completed_datapoint_count': {'key': 'completedDatapointCount', 'type': 'long'},
+        'incremental_dataset_last_refresh_time': {'key': 'incrementalDatasetLastRefreshTime', 'type': 'iso-8601'},
+        'skipped_datapoint_count': {'key': 'skippedDatapointCount', 'type': 'long'},
+        'total_datapoint_count': {'key': 'totalDatapointCount', 'type': 'long'},
+    }
+
+    def __init__(self, **kwargs):
+        super(ProgressMetrics, self).__init__(**kwargs)
+        self.completed_datapoint_count = None
+        self.incremental_dataset_last_refresh_time = None
+        self.skipped_datapoint_count = None
+        self.total_datapoint_count = None
+
+
+class ProxyResource(Resource):
+    """Proxy Resource.
+
+    The resource model definition for an Azure Resource Manager proxy resource.
+    It will not have tags and a location.
+
+    Variables are only populated by the server, and will be ignored when
+    sending a request.
+
+    :ivar id: Fully qualified resource ID for the resource. Ex -
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
+    :vartype id: str
+    :ivar name: The name of the resource
+    :vartype name: str
+    :ivar type: The type of the resource. E.g.
+ "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ProxyResource, self).__init__(**kwargs) + + +class PyTorch(DistributionConfiguration): + """PyTorch distribution configuration. + + All required parameters must be populated in order to send to Azure. + + :param distribution_type: Required. Constant filled by server. + :type distribution_type: str + :param process_count: Total process count for the distributed job. + :type process_count: int + """ + + _validation = { + 'distribution_type': {'required': True}, + } + + _attribute_map = { + 'distribution_type': {'key': 'distributionType', 'type': 'str'}, + 'process_count': {'key': 'processCount', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(PyTorch, self).__init__(**kwargs) + self.process_count = kwargs.get('process_count', None) + self.distribution_type = 'PyTorch' + + +class QuotaBaseProperties(Model): + """The properties for Quota update or retrieval. + + :param id: Specifies the resource ID. + :type id: str + :param type: Specifies the resource type. + :type type: str + :param limit: Limit. The maximum permitted quota of the resource. + :type limit: long + :param unit: An enum describing the unit of quota measurement. 
Possible + values include: 'Count' + :type unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'limit': {'key': 'limit', 'type': 'long'}, + 'unit': {'key': 'unit', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(QuotaBaseProperties, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.type = kwargs.get('type', None) + self.limit = kwargs.get('limit', None) + self.unit = kwargs.get('unit', None) + + +class QuotaUpdateParameters(Model): + """Quota update parameters. + + :param value: The list for update quota. + :type value: + list[~azure.mgmt.machinelearningservices.models.QuotaBaseProperties] + :param location: Region of workspace quota to be updated. + :type location: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[QuotaBaseProperties]'}, + 'location': {'key': 'location', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(QuotaUpdateParameters, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.location = kwargs.get('location', None) + + +class Recurrence(Model): + """The workflow trigger recurrence for ComputeStartStop schedule type. + + :param frequency: Possible values include: 'NotSpecified', 'Second', + 'Minute', 'Hour', 'Day', 'Week', 'Month', 'Year' + :type frequency: str or + ~azure.mgmt.machinelearningservices.models.RecurrenceFrequency + :param interval: The interval. + :type interval: int + :param start_time: The start time. + :type start_time: str + :param time_zone: The time zone. 
+ :type time_zone: str + :param schedule: + :type schedule: + ~azure.mgmt.machinelearningservices.models.RecurrenceSchedule + """ + + _attribute_map = { + 'frequency': {'key': 'frequency', 'type': 'str'}, + 'interval': {'key': 'interval', 'type': 'int'}, + 'start_time': {'key': 'startTime', 'type': 'str'}, + 'time_zone': {'key': 'timeZone', 'type': 'str'}, + 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, + } + + def __init__(self, **kwargs): + super(Recurrence, self).__init__(**kwargs) + self.frequency = kwargs.get('frequency', None) + self.interval = kwargs.get('interval', None) + self.start_time = kwargs.get('start_time', None) + self.time_zone = kwargs.get('time_zone', None) + self.schedule = kwargs.get('schedule', None) + + +class RecurrenceSchedule(Model): + """The recurrence schedule. + + :param minutes: The minutes. + :type minutes: list[int] + :param hours: The hours. + :type hours: list[int] + :param week_days: The days of the week. + :type week_days: list[str or + ~azure.mgmt.machinelearningservices.models.DaysOfWeek] + """ + + _attribute_map = { + 'minutes': {'key': 'minutes', 'type': '[int]'}, + 'hours': {'key': 'hours', 'type': '[int]'}, + 'week_days': {'key': 'weekDays', 'type': '[DaysOfWeek]'}, + } + + def __init__(self, **kwargs): + super(RecurrenceSchedule, self).__init__(**kwargs) + self.minutes = kwargs.get('minutes', None) + self.hours = kwargs.get('hours', None) + self.week_days = kwargs.get('week_days', None) + + +class RegenerateEndpointKeysRequest(Model): + """RegenerateEndpointKeysRequest. + + All required parameters must be populated in order to send to Azure. + + :param key_type: Required. Specification for which type of key to + generate. Primary or Secondary. Possible values include: 'Primary', + 'Secondary' + :type key_type: str or ~azure.mgmt.machinelearningservices.models.KeyType + :param key_value: The value the key is set to. 
+ :type key_value: str + """ + + _validation = { + 'key_type': {'required': True}, + } + + _attribute_map = { + 'key_type': {'key': 'keyType', 'type': 'str'}, + 'key_value': {'key': 'keyValue', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(RegenerateEndpointKeysRequest, self).__init__(**kwargs) + self.key_type = kwargs.get('key_type', None) + self.key_value = kwargs.get('key_value', None) class RegistryListCredentialsResult(Model): @@ -1338,173 +7161,1343 @@ class RegistryListCredentialsResult(Model): Variables are only populated by the server, and will be ignored when sending a request. - :ivar location: - :vartype location: str - :ivar username: - :vartype username: str - :param passwords: - :type passwords: list[~azure.mgmt.machinelearningservices.models.Password] + :ivar location: + :vartype location: str + :ivar username: + :vartype username: str + :param passwords: + :type passwords: list[~azure.mgmt.machinelearningservices.models.Password] + """ + + _validation = { + 'location': {'readonly': True}, + 'username': {'readonly': True}, + } + + _attribute_map = { + 'location': {'key': 'location', 'type': 'str'}, + 'username': {'key': 'username', 'type': 'str'}, + 'passwords': {'key': 'passwords', 'type': '[Password]'}, + } + + def __init__(self, **kwargs): + super(RegistryListCredentialsResult, self).__init__(**kwargs) + self.location = None + self.username = None + self.passwords = kwargs.get('passwords', None) + + +class ResourceId(Model): + """Represents a resource ID. For example, for a subnet, it is the resource URL + for the subnet. + + All required parameters must be populated in order to send to Azure. + + :param id: Required. 
The ID of the resource + :type id: str + """ + + _validation = { + 'id': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ResourceId, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + + +class ResourceIdentity(Model): + """Service identity associated with a resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar principal_id: Client ID that is used when authenticating. + :vartype principal_id: str + :ivar tenant_id: AAD Tenant where this identity lives. + :vartype tenant_id: str + :param type: Defines values for a ResourceIdentity's type. Possible values + include: 'SystemAssigned', 'UserAssigned', 'SystemAssigned,UserAssigned', + 'None' + :type type: str or + ~azure.mgmt.machinelearningservices.models.ResourceIdentityAssignment + :param user_assigned_identities: Dictionary of the user assigned + identities, key is ARM resource ID of the UAI. + :type user_assigned_identities: dict[str, + ~azure.mgmt.machinelearningservices.models.UserAssignedIdentityMeta] + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentityMeta}'}, + } + + def __init__(self, **kwargs): + super(ResourceIdentity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + self.type = kwargs.get('type', None) + self.user_assigned_identities = kwargs.get('user_assigned_identities', None) + + +class ResourceName(Model): + """The Resource Name. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar value: The name of the resource. 
+ :vartype value: str + :ivar localized_value: The localized name of the resource. + :vartype localized_value: str + """ + + _validation = { + 'value': {'readonly': True}, + 'localized_value': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': 'str'}, + 'localized_value': {'key': 'localizedValue', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ResourceName, self).__init__(**kwargs) + self.value = None + self.localized_value = None + + +class ResourceQuota(Model): + """The quota assigned to a resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Specifies the resource ID. + :vartype id: str + :ivar aml_workspace_location: Region of the AML workspace in the id. + :vartype aml_workspace_location: str + :ivar type: Specifies the resource type. + :vartype type: str + :ivar name: Name of the resource. + :vartype name: ~azure.mgmt.machinelearningservices.models.ResourceName + :ivar limit: Limit. The maximum permitted quota of the resource. + :vartype limit: long + :ivar unit: An enum describing the unit of quota measurement. 
Possible + values include: 'Count' + :vartype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit + """ + + _validation = { + 'id': {'readonly': True}, + 'aml_workspace_location': {'readonly': True}, + 'type': {'readonly': True}, + 'name': {'readonly': True}, + 'limit': {'readonly': True}, + 'unit': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'aml_workspace_location': {'key': 'amlWorkspaceLocation', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'ResourceName'}, + 'limit': {'key': 'limit', 'type': 'long'}, + 'unit': {'key': 'unit', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ResourceQuota, self).__init__(**kwargs) + self.id = None + self.aml_workspace_location = None + self.type = None + self.name = None + self.limit = None + self.unit = None + + +class ResourceSkuLocationInfo(Model): + """ResourceSkuLocationInfo. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar location: Location of the SKU + :vartype location: str + :ivar zones: List of availability zones where the SKU is supported. + :vartype zones: list[str] + :ivar zone_details: Details of capabilities available to a SKU in specific + zones. + :vartype zone_details: + list[~azure.mgmt.machinelearningservices.models.ResourceSkuZoneDetails] + """ + + _validation = { + 'location': {'readonly': True}, + 'zones': {'readonly': True}, + 'zone_details': {'readonly': True}, + } + + _attribute_map = { + 'location': {'key': 'location', 'type': 'str'}, + 'zones': {'key': 'zones', 'type': '[str]'}, + 'zone_details': {'key': 'zoneDetails', 'type': '[ResourceSkuZoneDetails]'}, + } + + def __init__(self, **kwargs): + super(ResourceSkuLocationInfo, self).__init__(**kwargs) + self.location = None + self.zones = None + self.zone_details = None + + +class ResourceSkuZoneDetails(Model): + """Describes The zonal capabilities of a SKU. 
+ + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar name: The set of zones that the SKU is available in with the + specified capabilities. + :vartype name: list[str] + :ivar capabilities: A list of capabilities that are available for the SKU + in the specified list of zones. + :vartype capabilities: + list[~azure.mgmt.machinelearningservices.models.SKUCapability] + """ + + _validation = { + 'name': {'readonly': True}, + 'capabilities': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': '[str]'}, + 'capabilities': {'key': 'capabilities', 'type': '[SKUCapability]'}, + } + + def __init__(self, **kwargs): + super(ResourceSkuZoneDetails, self).__init__(**kwargs) + self.name = None + self.capabilities = None + + +class Restriction(Model): + """The restriction because of which SKU cannot be used. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar type: The type of restrictions. As of now only possible value for + this is location. + :vartype type: str + :ivar values: The value of restrictions. If the restriction type is set to + location. This would be different locations where the SKU is restricted. + :vartype values: list[str] + :param reason_code: The reason for the restriction. Possible values + include: 'NotSpecified', 'NotAvailableForRegion', + 'NotAvailableForSubscription' + :type reason_code: str or + ~azure.mgmt.machinelearningservices.models.ReasonCode + """ + + _validation = { + 'type': {'readonly': True}, + 'values': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'values': {'key': 'values', 'type': '[str]'}, + 'reason_code': {'key': 'reasonCode', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(Restriction, self).__init__(**kwargs) + self.type = None + self.values = None + self.reason_code = kwargs.get('reason_code', None) + + +class Route(Model): + """Route. 
+ + All required parameters must be populated in order to send to Azure. + + :param path: Required. The path for the route. + :type path: str + :param port: Required. The port for the route. + :type port: int + """ + + _validation = { + 'path': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'port': {'required': True}, + } + + _attribute_map = { + 'path': {'key': 'path', 'type': 'str'}, + 'port': {'key': 'port', 'type': 'int'}, + } + + def __init__(self, **kwargs): + super(Route, self).__init__(**kwargs) + self.path = kwargs.get('path', None) + self.port = kwargs.get('port', None) + + +class SasDatastoreCredentials(DatastoreCredentials): + """SAS datastore credentials configuration. + + All required parameters must be populated in order to send to Azure. + + :param credentials_type: Required. Constant filled by server. + :type credentials_type: str + :param secrets: Storage container secrets. + :type secrets: + ~azure.mgmt.machinelearningservices.models.SasDatastoreSecrets + """ + + _validation = { + 'credentials_type': {'required': True}, + } + + _attribute_map = { + 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, + 'secrets': {'key': 'secrets', 'type': 'SasDatastoreSecrets'}, + } + + def __init__(self, **kwargs): + super(SasDatastoreCredentials, self).__init__(**kwargs) + self.secrets = kwargs.get('secrets', None) + self.credentials_type = 'Sas' + + +class SasDatastoreSecrets(DatastoreSecrets): + """Datastore SAS secrets. + + All required parameters must be populated in order to send to Azure. + + :param secrets_type: Required. Constant filled by server. + :type secrets_type: str + :param sas_token: Storage container SAS token. 
+    :type sas_token: str
+    """
+
+    _validation = {
+        'secrets_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'secrets_type': {'key': 'secretsType', 'type': 'str'},
+        'sas_token': {'key': 'sasToken', 'type': 'str'},
+    }
+
+    def __init__(self, **kwargs):
+        super(SasDatastoreSecrets, self).__init__(**kwargs)
+        self.sas_token = kwargs.get('sas_token', None)
+        self.secrets_type = 'Sas'
+
+
+class ScaleSettings(Model):
+    """Scale settings for AML Compute.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param max_node_count: Required. Max number of nodes to use
+    :type max_node_count: int
+    :param min_node_count: Min number of nodes to use. Default value: 0 .
+    :type min_node_count: int
+    :param node_idle_time_before_scale_down: Node Idle Time before scaling
+     down amlCompute. This string needs to be in ISO 8601 duration format.
+    :type node_idle_time_before_scale_down: timedelta
+    """
+
+    _validation = {
+        'max_node_count': {'required': True},
+    }
+
+    _attribute_map = {
+        'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
+        'min_node_count': {'key': 'minNodeCount', 'type': 'int'},
+        'node_idle_time_before_scale_down': {'key': 'nodeIdleTimeBeforeScaleDown', 'type': 'duration'},
+    }
+
+    def __init__(self, **kwargs):
+        super(ScaleSettings, self).__init__(**kwargs)
+        self.max_node_count = kwargs.get('max_node_count', None)
+        self.min_node_count = kwargs.get('min_node_count', 0)
+        self.node_idle_time_before_scale_down = kwargs.get('node_idle_time_before_scale_down', None)
+
+
+class ScriptReference(Model):
+    """Script reference.
+
+    :param script_source: The storage source of the script: inline, workspace.
+    :type script_source: str
+    :param script_data: The location of scripts in the mounted volume.
+    :type script_data: str
+    :param script_arguments: Optional command line arguments passed to the
+     script to run.
+    :type script_arguments: str
+    :param timeout: Optional time period passed to timeout command.
+ :type timeout: str + """ + + _attribute_map = { + 'script_source': {'key': 'scriptSource', 'type': 'str'}, + 'script_data': {'key': 'scriptData', 'type': 'str'}, + 'script_arguments': {'key': 'scriptArguments', 'type': 'str'}, + 'timeout': {'key': 'timeout', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ScriptReference, self).__init__(**kwargs) + self.script_source = kwargs.get('script_source', None) + self.script_data = kwargs.get('script_data', None) + self.script_arguments = kwargs.get('script_arguments', None) + self.timeout = kwargs.get('timeout', None) + + +class ScriptsToExecute(Model): + """Customized setup scripts. + + :param startup_script: Script that's run every time the machine starts. + :type startup_script: + ~azure.mgmt.machinelearningservices.models.ScriptReference + :param creation_script: Script that's run only once during provision of + the compute. + :type creation_script: + ~azure.mgmt.machinelearningservices.models.ScriptReference + """ + + _attribute_map = { + 'startup_script': {'key': 'startupScript', 'type': 'ScriptReference'}, + 'creation_script': {'key': 'creationScript', 'type': 'ScriptReference'}, + } + + def __init__(self, **kwargs): + super(ScriptsToExecute, self).__init__(**kwargs) + self.startup_script = kwargs.get('startup_script', None) + self.creation_script = kwargs.get('creation_script', None) + + +class ServiceManagedResourcesSettings(Model): + """ServiceManagedResourcesSettings. + + :param cosmos_db: The settings for the service managed cosmosdb account. + :type cosmos_db: + ~azure.mgmt.machinelearningservices.models.CosmosDbSettings + """ + + _attribute_map = { + 'cosmos_db': {'key': 'cosmosDb', 'type': 'CosmosDbSettings'}, + } + + def __init__(self, **kwargs): + super(ServiceManagedResourcesSettings, self).__init__(**kwargs) + self.cosmos_db = kwargs.get('cosmos_db', None) + + +class ServicePrincipalCredentials(Model): + """Service principal credentials. 
+ + All required parameters must be populated in order to send to Azure. + + :param client_id: Required. Client Id + :type client_id: str + :param client_secret: Required. Client secret + :type client_secret: str + """ + + _validation = { + 'client_id': {'required': True}, + 'client_secret': {'required': True}, + } + + _attribute_map = { + 'client_id': {'key': 'clientId', 'type': 'str'}, + 'client_secret': {'key': 'clientSecret', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ServicePrincipalCredentials, self).__init__(**kwargs) + self.client_id = kwargs.get('client_id', None) + self.client_secret = kwargs.get('client_secret', None) + + +class ServicePrincipalDatastoreCredentials(DatastoreCredentials): + """Service Principal datastore credentials configuration. + + All required parameters must be populated in order to send to Azure. + + :param credentials_type: Required. Constant filled by server. + :type credentials_type: str + :param authority_url: Authority URL used for authentication. + :type authority_url: str + :param client_id: Required. Service principal client ID. + :type client_id: str + :param resource_uri: Resource the service principal has access to. + :type resource_uri: str + :param secrets: Service principal secrets. + :type secrets: + ~azure.mgmt.machinelearningservices.models.ServicePrincipalDatastoreSecrets + :param tenant_id: Required. ID of the tenant to which the service + principal belongs. 
+ :type tenant_id: str + """ + + _validation = { + 'credentials_type': {'required': True}, + 'client_id': {'required': True}, + 'tenant_id': {'required': True}, + } + + _attribute_map = { + 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, + 'authority_url': {'key': 'authorityUrl', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + 'resource_uri': {'key': 'resourceUri', 'type': 'str'}, + 'secrets': {'key': 'secrets', 'type': 'ServicePrincipalDatastoreSecrets'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ServicePrincipalDatastoreCredentials, self).__init__(**kwargs) + self.authority_url = kwargs.get('authority_url', None) + self.client_id = kwargs.get('client_id', None) + self.resource_uri = kwargs.get('resource_uri', None) + self.secrets = kwargs.get('secrets', None) + self.tenant_id = kwargs.get('tenant_id', None) + self.credentials_type = 'ServicePrincipal' + + +class ServicePrincipalDatastoreSecrets(DatastoreSecrets): + """Datastore Service Principal secrets. + + All required parameters must be populated in order to send to Azure. + + :param secrets_type: Required. Constant filled by server. + :type secrets_type: str + :param client_secret: Service principal secret. + :type client_secret: str + """ + + _validation = { + 'secrets_type': {'required': True}, + } + + _attribute_map = { + 'secrets_type': {'key': 'secretsType', 'type': 'str'}, + 'client_secret': {'key': 'clientSecret', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(ServicePrincipalDatastoreSecrets, self).__init__(**kwargs) + self.client_secret = kwargs.get('client_secret', None) + self.secrets_type = 'ServicePrincipal' + + +class SetupScripts(Model): + """Details of customized scripts to execute for setting up the cluster. 
+ + :param scripts: Customized setup scripts + :type scripts: ~azure.mgmt.machinelearningservices.models.ScriptsToExecute + """ + + _attribute_map = { + 'scripts': {'key': 'scripts', 'type': 'ScriptsToExecute'}, + } + + def __init__(self, **kwargs): + super(SetupScripts, self).__init__(**kwargs) + self.scripts = kwargs.get('scripts', None) + + +class SharedPrivateLinkResource(Model): + """SharedPrivateLinkResource. + + :param name: Unique name of the private link. + :type name: str + :param private_link_resource_id: The resource id that private link links + to. + :type private_link_resource_id: str + :param group_id: The private link resource group id. + :type group_id: str + :param request_message: Request message. + :type request_message: str + :param status: Indicates whether the connection has been + Approved/Rejected/Removed by the owner of the service. Possible values + include: 'Pending', 'Approved', 'Rejected', 'Disconnected', 'Timeout' + :type status: str or + ~azure.mgmt.machinelearningservices.models.PrivateEndpointServiceConnectionStatus + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'}, + 'group_id': {'key': 'properties.groupId', 'type': 'str'}, + 'request_message': {'key': 'properties.requestMessage', 'type': 'str'}, + 'status': {'key': 'properties.status', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SharedPrivateLinkResource, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.private_link_resource_id = kwargs.get('private_link_resource_id', None) + self.group_id = kwargs.get('group_id', None) + self.request_message = kwargs.get('request_message', None) + self.status = kwargs.get('status', None) + + +class Sku(Model): + """Sku of the resource. 
+ + :param name: Name of the sku + :type name: str + :param tier: Tier of the sku like Basic or Enterprise + :type tier: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'tier': {'key': 'tier', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(Sku, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.tier = kwargs.get('tier', None) + + +class SKUCapability(Model): + """Features/user capabilities associated with the sku. + + :param name: Capability/Feature ID + :type name: str + :param value: Details about the feature/capability + :type value: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SKUCapability, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.value = kwargs.get('value', None) + + +class SqlAdminDatastoreCredentials(DatastoreCredentials): + """SQL Admin datastore credentials configuration. + + All required parameters must be populated in order to send to Azure. + + :param credentials_type: Required. Constant filled by server. + :type credentials_type: str + :param secrets: SQL database secrets. + :type secrets: + ~azure.mgmt.machinelearningservices.models.SqlAdminDatastoreSecrets + :param user_id: Required. SQL database user name. 
+ :type user_id: str + """ + + _validation = { + 'credentials_type': {'required': True}, + 'user_id': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, + 'secrets': {'key': 'secrets', 'type': 'SqlAdminDatastoreSecrets'}, + 'user_id': {'key': 'userId', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SqlAdminDatastoreCredentials, self).__init__(**kwargs) + self.secrets = kwargs.get('secrets', None) + self.user_id = kwargs.get('user_id', None) + self.credentials_type = 'SqlAdmin' + + +class SqlAdminDatastoreSecrets(DatastoreSecrets): + """Datastore SQL Admin secrets. + + All required parameters must be populated in order to send to Azure. + + :param secrets_type: Required. Constant filled by server. + :type secrets_type: str + :param password: SQL database password. + :type password: str + """ + + _validation = { + 'secrets_type': {'required': True}, + } + + _attribute_map = { + 'secrets_type': {'key': 'secretsType', 'type': 'str'}, + 'password': {'key': 'password', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SqlAdminDatastoreSecrets, self).__init__(**kwargs) + self.password = kwargs.get('password', None) + self.secrets_type = 'SqlAdmin' + + +class SslConfiguration(Model): + """The ssl configuration for scoring. + + :param status: Enable or disable ssl for scoring. Possible values include: + 'Disabled', 'Enabled', 'Auto' + :type status: str or ~azure.mgmt.machinelearningservices.models.enum + :param cert: Cert data + :type cert: str + :param key: Key data + :type key: str + :param cname: CNAME of the cert + :type cname: str + :param leaf_domain_label: Leaf domain label of public endpoint + :type leaf_domain_label: str + :param overwrite_existing_domain: Indicates whether to overwrite existing + domain label. 
+ :type overwrite_existing_domain: bool + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'cert': {'key': 'cert', 'type': 'str'}, + 'key': {'key': 'key', 'type': 'str'}, + 'cname': {'key': 'cname', 'type': 'str'}, + 'leaf_domain_label': {'key': 'leafDomainLabel', 'type': 'str'}, + 'overwrite_existing_domain': {'key': 'overwriteExistingDomain', 'type': 'bool'}, + } + + def __init__(self, **kwargs): + super(SslConfiguration, self).__init__(**kwargs) + self.status = kwargs.get('status', None) + self.cert = kwargs.get('cert', None) + self.key = kwargs.get('key', None) + self.cname = kwargs.get('cname', None) + self.leaf_domain_label = kwargs.get('leaf_domain_label', None) + self.overwrite_existing_domain = kwargs.get('overwrite_existing_domain', None) + + +class StatusMessage(Model): + """Active message associated with project. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar code: Service-defined message code. + :vartype code: str + :ivar created_time_utc: Time in UTC at which the message was created. + :vartype created_time_utc: datetime + :ivar level: Severity level of message. Possible values include: 'Error', + 'Information', 'Warning' + :vartype level: str or + ~azure.mgmt.machinelearningservices.models.StatusMessageLevel + :ivar message: A human-readable representation of the message code. 
+ :vartype message: str + """ + + _validation = { + 'code': {'readonly': True}, + 'created_time_utc': {'readonly': True}, + 'level': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'created_time_utc': {'key': 'createdTimeUtc', 'type': 'iso-8601'}, + 'level': {'key': 'level', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(StatusMessage, self).__init__(**kwargs) + self.code = None + self.created_time_utc = None + self.level = None + self.message = None + + +class SweepJob(JobBase): + """Sweep job definition. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param description: The asset description text. + :type description: str + :ivar interaction_endpoints: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of + FileStreamObject. + :vartype interaction_endpoints: dict[str, + ~azure.mgmt.machinelearningservices.models.JobEndpoint] + :param properties: The asset property dictionary. + :type properties: dict[str, str] + :ivar provisioning_state: Specifies the job provisioning state. Possible + values include: 'Succeeded', 'Failed', 'Canceled', 'InProgress' + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.JobProvisioningState + :param tags: Tag dictionary. Tags can be added, removed, and updated. + :type tags: dict[str, str] + :param job_type: Required. Constant filled by server. + :type job_type: str + :param algorithm: Required. Type of the hyperparameter sampling + algorithms. Possible values include: 'Grid', 'Random', 'Bayesian' + :type algorithm: str or + ~azure.mgmt.machinelearningservices.models.SamplingAlgorithm + :param compute: Required. Compute binding for the job. 
+ :type compute: + ~azure.mgmt.machinelearningservices.models.ComputeConfiguration + :param early_termination: Early termination policies enable canceling + poor-performing runs before they complete. + :type early_termination: + ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy + :param experiment_name: The name of the experiment the job belongs to. If + not set, the job is placed in the "Default" experiment. + :type experiment_name: str + :param identity: Identity configuration. If set, this should be one of + AmlToken, ManagedIdentity or null. + Defaults to AmlToken if null. + :type identity: + ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :param max_concurrent_trials: An upper bound on the number of trials + performed in parallel. + :type max_concurrent_trials: int + :param max_total_trials: An upper bound on the number of trials to + perform. + :type max_total_trials: int + :param objective: Required. Optimization objective. + :type objective: ~azure.mgmt.machinelearningservices.models.Objective + :ivar output: Location of the job output logs and artifacts. + :vartype output: ~azure.mgmt.machinelearningservices.models.JobOutput + :param priority: Job priority for scheduling policy. Only applies to + AMLCompute. + Private preview feature and only available to users on the allow list. + :type priority: int + :param search_space: Required. A dictionary containing each parameter and + its distribution. The dictionary key is the name of the parameter + :type search_space: dict[str, object] + :ivar status: The status of a job. Possible values include: 'NotStarted', + 'Starting', 'Provisioning', 'Preparing', 'Queued', 'Running', + 'Finalizing', 'CancelRequested', 'Completed', 'Failed', 'Canceled', + 'NotResponding', 'Paused', 'Unknown' + :vartype status: str or + ~azure.mgmt.machinelearningservices.models.JobStatus + :param timeout: The total timeout in ISO 8601 format. Only supports + duration with precision as low as Minutes. 
+ :type timeout: timedelta + :param trial: Trial component definition. + :type trial: ~azure.mgmt.machinelearningservices.models.TrialComponent + """ + + _validation = { + 'interaction_endpoints': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'job_type': {'required': True}, + 'algorithm': {'required': True}, + 'compute': {'required': True}, + 'objective': {'required': True}, + 'output': {'readonly': True}, + 'search_space': {'required': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'job_type': {'key': 'jobType', 'type': 'str'}, + 'algorithm': {'key': 'algorithm', 'type': 'str'}, + 'compute': {'key': 'compute', 'type': 'ComputeConfiguration'}, + 'early_termination': {'key': 'earlyTermination', 'type': 'EarlyTerminationPolicy'}, + 'experiment_name': {'key': 'experimentName', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, + 'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'}, + 'max_total_trials': {'key': 'maxTotalTrials', 'type': 'int'}, + 'objective': {'key': 'objective', 'type': 'Objective'}, + 'output': {'key': 'output', 'type': 'JobOutput'}, + 'priority': {'key': 'priority', 'type': 'int'}, + 'search_space': {'key': 'searchSpace', 'type': '{object}'}, + 'status': {'key': 'status', 'type': 'str'}, + 'timeout': {'key': 'timeout', 'type': 'duration'}, + 'trial': {'key': 'trial', 'type': 'TrialComponent'}, + } + + def __init__(self, **kwargs): + super(SweepJob, self).__init__(**kwargs) + self.algorithm = kwargs.get('algorithm', None) + self.compute = kwargs.get('compute', None) + self.early_termination = kwargs.get('early_termination', None) + self.experiment_name = 
kwargs.get('experiment_name', None) + self.identity = kwargs.get('identity', None) + self.max_concurrent_trials = kwargs.get('max_concurrent_trials', None) + self.max_total_trials = kwargs.get('max_total_trials', None) + self.objective = kwargs.get('objective', None) + self.output = None + self.priority = kwargs.get('priority', None) + self.search_space = kwargs.get('search_space', None) + self.status = None + self.timeout = kwargs.get('timeout', None) + self.trial = kwargs.get('trial', None) + self.job_type = 'Sweep' + + +class SynapseSpark(Model): + """A SynapseSpark compute. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute. Possible values + include: 'AKS', 'AmlCompute', 'ComputeInstance', 'DataFactory', + 'VirtualMachine', 'HDInsight', 'Databricks', 'DataLakeAnalytics', + 'SynapseSpark' + :type compute_type: str or + ~azure.mgmt.machinelearningservices.models.ComputeType + :param compute_location: Location for the underlying compute + :type compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values + are Unknown, Updating, Provisioning, Succeeded, and Failed. Possible + values include: 'Unknown', 'Updating', 'Creating', 'Deleting', + 'Succeeded', 'Failed', 'Canceled' + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ProvisioningState + :param description: The description of the Machine Learning compute. + :type description: str + :ivar created_on: The time at which the compute was created. + :vartype created_on: datetime + :ivar modified_on: The time at which the compute was last modified. 
+ :vartype modified_on: datetime + :param resource_id: ARM resource id of the underlying compute + :type resource_id: str + :ivar provisioning_errors: Errors during provisioning + :vartype provisioning_errors: + list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar is_attached_compute: Indicating whether the compute was provisioned + by user and brought from outside if true, or machine learning service + provisioned it if false. + :vartype is_attached_compute: bool + :param disable_local_auth: Opt-out of local authentication and ensure + customers can use only MSI and AAD exclusively for authentication. + :type disable_local_auth: bool + :param properties: AKS properties + :type properties: + ~azure.mgmt.machinelearningservices.models.SynapseSparkProperties + """ + + _validation = { + 'compute_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + 'properties': {'key': 'properties', 'type': 'SynapseSparkProperties'}, + } + + def __init__(self, **kwargs): + super(SynapseSpark, self).__init__(**kwargs) + self.compute_type = kwargs.get('compute_type', None) + self.compute_location = kwargs.get('compute_location', 
None) + self.provisioning_state = None + self.description = kwargs.get('description', None) + self.created_on = None + self.modified_on = None + self.resource_id = kwargs.get('resource_id', None) + self.provisioning_errors = None + self.is_attached_compute = None + self.disable_local_auth = kwargs.get('disable_local_auth', None) + self.properties = kwargs.get('properties', None) + + +class SynapseSparkPoolProperties(Model): + """Properties specific to Synapse Spark pools. + + :param properties: AKS properties + :type properties: + ~azure.mgmt.machinelearningservices.models.SynapseSparkProperties + """ + + _attribute_map = { + 'properties': {'key': 'properties', 'type': 'SynapseSparkProperties'}, + } + + def __init__(self, **kwargs): + super(SynapseSparkPoolProperties, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + + +class SynapseSparkProperties(Model): + """AKS properties. + + :param auto_scale_properties: Auto scale properties. + :type auto_scale_properties: + ~azure.mgmt.machinelearningservices.models.AutoScaleProperties + :param auto_pause_properties: Auto pause properties. + :type auto_pause_properties: + ~azure.mgmt.machinelearningservices.models.AutoPauseProperties + :param spark_version: Spark version. + :type spark_version: str + :param node_count: The number of compute nodes currently assigned to the + compute. + :type node_count: int + :param node_size: Node size. + :type node_size: str + :param node_size_family: Node size family. + :type node_size_family: str + :param subscription_id: Azure subscription identifier. + :type subscription_id: str + :param resource_group: Name of the resource group in which workspace is + located. + :type resource_group: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param pool_name: Pool name. 
+ :type pool_name: str + """ + + _attribute_map = { + 'auto_scale_properties': {'key': 'autoScaleProperties', 'type': 'AutoScaleProperties'}, + 'auto_pause_properties': {'key': 'autoPauseProperties', 'type': 'AutoPauseProperties'}, + 'spark_version': {'key': 'sparkVersion', 'type': 'str'}, + 'node_count': {'key': 'nodeCount', 'type': 'int'}, + 'node_size': {'key': 'nodeSize', 'type': 'str'}, + 'node_size_family': {'key': 'nodeSizeFamily', 'type': 'str'}, + 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, + 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, + 'workspace_name': {'key': 'workspaceName', 'type': 'str'}, + 'pool_name': {'key': 'poolName', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(SynapseSparkProperties, self).__init__(**kwargs) + self.auto_scale_properties = kwargs.get('auto_scale_properties', None) + self.auto_pause_properties = kwargs.get('auto_pause_properties', None) + self.spark_version = kwargs.get('spark_version', None) + self.node_count = kwargs.get('node_count', None) + self.node_size = kwargs.get('node_size', None) + self.node_size_family = kwargs.get('node_size_family', None) + self.subscription_id = kwargs.get('subscription_id', None) + self.resource_group = kwargs.get('resource_group', None) + self.workspace_name = kwargs.get('workspace_name', None) + self.pool_name = kwargs.get('pool_name', None) + + +class SystemData(Model): + """Metadata pertaining to creation and last modification of the resource. + + :param created_by: The identity that created the resource. + :type created_by: str + :param created_by_type: The type of identity that created the resource. + Possible values include: 'User', 'Application', 'ManagedIdentity', 'Key' + :type created_by_type: str or + ~azure.mgmt.machinelearningservices.models.CreatedByType + :param created_at: The timestamp of resource creation (UTC). + :type created_at: datetime + :param last_modified_by: The identity that last modified the resource. 
+ :type last_modified_by: str + :param last_modified_by_type: The type of identity that last modified the + resource. Possible values include: 'User', 'Application', + 'ManagedIdentity', 'Key' + :type last_modified_by_type: str or + ~azure.mgmt.machinelearningservices.models.CreatedByType + :param last_modified_at: The timestamp of resource last modification (UTC) + :type last_modified_at: datetime + """ + + _attribute_map = { + 'created_by': {'key': 'createdBy', 'type': 'str'}, + 'created_by_type': {'key': 'createdByType', 'type': 'str'}, + 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, + 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, + 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, + 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, + } + + def __init__(self, **kwargs): + super(SystemData, self).__init__(**kwargs) + self.created_by = kwargs.get('created_by', None) + self.created_by_type = kwargs.get('created_by_type', None) + self.created_at = kwargs.get('created_at', None) + self.last_modified_by = kwargs.get('last_modified_by', None) + self.last_modified_by_type = kwargs.get('last_modified_by_type', None) + self.last_modified_at = kwargs.get('last_modified_at', None) + + +class SystemService(Model): + """A system service running on a compute. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar system_service_type: The type of this system service. + :vartype system_service_type: str + :ivar public_ip_address: Public IP address + :vartype public_ip_address: str + :ivar version: The version for this type. 
+ :vartype version: str """ _validation = { - 'location': {'readonly': True}, - 'username': {'readonly': True}, + 'system_service_type': {'readonly': True}, + 'public_ip_address': {'readonly': True}, + 'version': {'readonly': True}, } _attribute_map = { - 'location': {'key': 'location', 'type': 'str'}, - 'username': {'key': 'username', 'type': 'str'}, - 'passwords': {'key': 'passwords', 'type': '[Password]'}, + 'system_service_type': {'key': 'systemServiceType', 'type': 'str'}, + 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, } def __init__(self, **kwargs): - super(RegistryListCredentialsResult, self).__init__(**kwargs) - self.location = None - self.username = None - self.passwords = kwargs.get('passwords', None) + super(SystemService, self).__init__(**kwargs) + self.system_service_type = None + self.public_ip_address = None + self.version = None -class ResourceId(Model): - """Represents a resource ID. For example, for a subnet, it is the resource URL - for the subnet. +class TensorFlow(DistributionConfiguration): + """TensorFlow distribution configuration. All required parameters must be populated in order to send to Azure. - :param id: Required. The ID of the resource - :type id: str + :param distribution_type: Required. Constant filled by server. + :type distribution_type: str + :param parameter_server_count: Number of parameter server tasks. + :type parameter_server_count: int + :param worker_count: Number of workers. Overwrites the node count in + compute binding. 
+ :type worker_count: int """ _validation = { - 'id': {'required': True}, + 'distribution_type': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, + 'distribution_type': {'key': 'distributionType', 'type': 'str'}, + 'parameter_server_count': {'key': 'parameterServerCount', 'type': 'int'}, + 'worker_count': {'key': 'workerCount', 'type': 'int'}, } def __init__(self, **kwargs): - super(ResourceId, self).__init__(**kwargs) - self.id = kwargs.get('id', None) + super(TensorFlow, self).__init__(**kwargs) + self.parameter_server_count = kwargs.get('parameter_server_count', None) + self.worker_count = kwargs.get('worker_count', None) + self.distribution_type = 'TensorFlow' -class ScaleSettings(Model): - """scale settings for AML Compute. +class TrialComponent(Model): + """Trial component definition. All required parameters must be populated in order to send to Azure. - :param max_node_count: Required. Max number of nodes to use - :type max_node_count: int - :param min_node_count: Min number of nodes to use. Default value: 0 . - :type min_node_count: int - :param node_idle_time_before_scale_down: Node Idle Time before scaling - down amlCompute - :type node_idle_time_before_scale_down: timedelta + :param code_id: ARM resource ID of the code asset. + :type code_id: str + :param command: Required. The command to execute on startup of the job. + eg. "python train.py" + :type command: str + :param distribution: Distribution configuration of the job. If set, this + should be one of Mpi, Tensorflow, PyTorch, or null. + :type distribution: + ~azure.mgmt.machinelearningservices.models.DistributionConfiguration + :param environment_id: The ARM resource ID of the Environment + specification for the job. + :type environment_id: str + :param environment_variables: Environment variables included in the job. + :type environment_variables: dict[str, str] + :param input_data_bindings: Mapping of input data bindings used in the + job. 
+ :type input_data_bindings: dict[str, + ~azure.mgmt.machinelearningservices.models.InputDataBinding] + :param output_data_bindings: Mapping of output data bindings used in the + job. + :type output_data_bindings: dict[str, + ~azure.mgmt.machinelearningservices.models.OutputDataBinding] + :param timeout: The max run duration in ISO 8601 format, after which the + trial component will be cancelled. + Only supports duration with precision as low as Seconds. + :type timeout: timedelta """ _validation = { - 'max_node_count': {'required': True}, + 'command': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, } _attribute_map = { - 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'}, - 'min_node_count': {'key': 'minNodeCount', 'type': 'int'}, - 'node_idle_time_before_scale_down': {'key': 'nodeIdleTimeBeforeScaleDown', 'type': 'duration'}, + 'code_id': {'key': 'codeId', 'type': 'str'}, + 'command': {'key': 'command', 'type': 'str'}, + 'distribution': {'key': 'distribution', 'type': 'DistributionConfiguration'}, + 'environment_id': {'key': 'environmentId', 'type': 'str'}, + 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, + 'input_data_bindings': {'key': 'inputDataBindings', 'type': '{InputDataBinding}'}, + 'output_data_bindings': {'key': 'outputDataBindings', 'type': '{OutputDataBinding}'}, + 'timeout': {'key': 'timeout', 'type': 'duration'}, } def __init__(self, **kwargs): - super(ScaleSettings, self).__init__(**kwargs) - self.max_node_count = kwargs.get('max_node_count', None) - self.min_node_count = kwargs.get('min_node_count', 0) - self.node_idle_time_before_scale_down = kwargs.get('node_idle_time_before_scale_down', None) + super(TrialComponent, self).__init__(**kwargs) + self.code_id = kwargs.get('code_id', None) + self.command = kwargs.get('command', None) + self.distribution = kwargs.get('distribution', None) + self.environment_id = kwargs.get('environment_id', None) + self.environment_variables = 
kwargs.get('environment_variables', None) + self.input_data_bindings = kwargs.get('input_data_bindings', None) + self.output_data_bindings = kwargs.get('output_data_bindings', None) + self.timeout = kwargs.get('timeout', None) -class ServicePrincipalCredentials(Model): - """Service principal credentials. +class TruncationSelectionPolicy(EarlyTerminationPolicy): + """Defines an early termination policy that cancels a given percentage of runs + at each evaluation interval. All required parameters must be populated in order to send to Azure. - :param client_id: Required. Client Id - :type client_id: str - :param client_secret: Required. Client secret - :type client_secret: str + :param delay_evaluation: Number of intervals by which to delay the first + evaluation. + :type delay_evaluation: int + :param evaluation_interval: Interval (number of runs) between policy + evaluations. + :type evaluation_interval: int + :param policy_type: Required. Constant filled by server. + :type policy_type: str + :param truncation_percentage: The percentage of runs to cancel at each + evaluation interval. 
+ :type truncation_percentage: int """ _validation = { - 'client_id': {'required': True}, - 'client_secret': {'required': True}, + 'policy_type': {'required': True}, } _attribute_map = { - 'client_id': {'key': 'clientId', 'type': 'str'}, - 'client_secret': {'key': 'clientSecret', 'type': 'str'}, + 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'}, + 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'}, + 'policy_type': {'key': 'policyType', 'type': 'str'}, + 'truncation_percentage': {'key': 'truncationPercentage', 'type': 'int'}, } def __init__(self, **kwargs): - super(ServicePrincipalCredentials, self).__init__(**kwargs) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) + super(TruncationSelectionPolicy, self).__init__(**kwargs) + self.truncation_percentage = kwargs.get('truncation_percentage', None) + self.policy_type = 'TruncationSelection' -class SslConfiguration(Model): - """The ssl configuration for scoring. +class UpdateWorkspaceQuotas(Model): + """The properties for update Quota response. - :param status: Enable or disable ssl for scoring. Possible values include: - 'Disabled', 'Enabled' - :type status: str or ~azure.mgmt.machinelearningservices.models.enum - :param cert: Cert data - :type cert: str - :param key: Key data - :type key: str - :param cname: CNAME of the cert - :type cname: str + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Specifies the resource ID. + :vartype id: str + :ivar type: Specifies the resource type. + :vartype type: str + :param limit: Limit. The maximum permitted quota of the resource. + :type limit: long + :ivar unit: An enum describing the unit of quota measurement. Possible + values include: 'Count' + :vartype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit + :param status: Update Workspace Quota Status. Status of update workspace + quota. 
Possible values include: 'Undefined', 'Success', 'Failure', + 'InvalidQuotaBelowClusterMinimum', 'InvalidQuotaExceedsSubscriptionLimit', + 'InvalidVMFamilyName', 'OperationNotSupportedForSku', + 'OperationNotEnabledForRegion' + :type status: str or ~azure.mgmt.machinelearningservices.models.Status """ + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'unit': {'readonly': True}, + } + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'limit': {'key': 'limit', 'type': 'long'}, + 'unit': {'key': 'unit', 'type': 'str'}, 'status': {'key': 'status', 'type': 'str'}, - 'cert': {'key': 'cert', 'type': 'str'}, - 'key': {'key': 'key', 'type': 'str'}, - 'cname': {'key': 'cname', 'type': 'str'}, } def __init__(self, **kwargs): - super(SslConfiguration, self).__init__(**kwargs) + super(UpdateWorkspaceQuotas, self).__init__(**kwargs) + self.id = None + self.type = None + self.limit = kwargs.get('limit', None) + self.unit = None self.status = kwargs.get('status', None) - self.cert = kwargs.get('cert', None) - self.key = kwargs.get('key', None) - self.cname = kwargs.get('cname', None) -class SystemService(Model): - """A system service running on a compute. +class UpdateWorkspaceQuotasResult(Model): + """The result of update workspace quota. Variables are only populated by the server, and will be ignored when sending a request. - :ivar system_service_type: The type of this system service. - :vartype system_service_type: str - :ivar public_ip_address: Public IP address - :vartype public_ip_address: str - :ivar version: The version for this type. - :vartype version: str + :ivar value: The list of workspace quota update result. + :vartype value: + list[~azure.mgmt.machinelearningservices.models.UpdateWorkspaceQuotas] + :ivar next_link: The URI to fetch the next page of workspace quota update + result. Call ListNext() with this to fetch the next page of Workspace + Quota update result. 
+ :vartype next_link: str """ _validation = { - 'system_service_type': {'readonly': True}, - 'public_ip_address': {'readonly': True}, - 'version': {'readonly': True}, + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, } _attribute_map = { - 'system_service_type': {'key': 'systemServiceType', 'type': 'str'}, - 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[UpdateWorkspaceQuotas]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__(self, **kwargs): - super(SystemService, self).__init__(**kwargs) - self.system_service_type = None - self.public_ip_address = None - self.version = None + super(UpdateWorkspaceQuotasResult, self).__init__(**kwargs) + self.value = None + self.next_link = None class Usage(Model): @@ -1515,6 +8508,8 @@ class Usage(Model): :ivar id: Specifies the resource ID. :vartype id: str + :ivar aml_workspace_location: Region of the AML workspace in the id. + :vartype aml_workspace_location: str :ivar type: Specifies the resource type. :vartype type: str :ivar unit: An enum describing the unit of usage measurement. 
Possible @@ -1530,6 +8525,7 @@ class Usage(Model): _validation = { 'id': {'readonly': True}, + 'aml_workspace_location': {'readonly': True}, 'type': {'readonly': True}, 'unit': {'readonly': True}, 'current_value': {'readonly': True}, @@ -1539,6 +8535,7 @@ class Usage(Model): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, + 'aml_workspace_location': {'key': 'amlWorkspaceLocation', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'unit': {'key': 'unit', 'type': 'str'}, 'current_value': {'key': 'currentValue', 'type': 'long'}, @@ -1549,6 +8546,7 @@ class Usage(Model): def __init__(self, **kwargs): super(Usage, self).__init__(**kwargs) self.id = None + self.aml_workspace_location = None self.type = None self.unit = None self.current_value = None @@ -1618,6 +8616,63 @@ def __init__(self, **kwargs): self.admin_user_password = kwargs.get('admin_user_password', None) +class UserAssignedIdentity(Model): + """User Assigned Identity. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar principal_id: The principal ID of the user assigned identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of the user assigned identity. + :vartype tenant_id: str + :ivar client_id: The clientId(aka appId) of the user assigned identity. + :vartype client_id: str + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + 'client_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(UserAssignedIdentity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + self.client_id = None + + +class UserAssignedIdentityMeta(Model): + """User assigned identities associated with a resource. 
+ + :param client_id: Aka application ID, a unique identifier generated by + Azure AD that is tied to an application and service principal during its + initial provisioning. + :type client_id: str + :param principal_id: The object ID of the service principal object for + your managed identity that is used to grant role-based access to an Azure + resource. + :type principal_id: str + """ + + _attribute_map = { + 'client_id': {'key': 'clientId', 'type': 'str'}, + 'principal_id': {'key': 'principalId', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(UserAssignedIdentityMeta, self).__init__(**kwargs) + self.client_id = kwargs.get('client_id', None) + self.principal_id = kwargs.get('principal_id', None) + + class VirtualMachine(Compute): """A Machine Learning compute based on Azure Virtual Machines. @@ -1636,19 +8691,22 @@ class VirtualMachine(Compute): ~azure.mgmt.machinelearningservices.models.ProvisioningState :param description: The description of the Machine Learning compute. :type description: str - :ivar created_on: The date and time when the compute was created. + :ivar created_on: The time at which the compute was created. :vartype created_on: datetime - :ivar modified_on: The date and time when the compute was last modified. + :ivar modified_on: The time at which the compute was last modified. :vartype modified_on: datetime :param resource_id: ARM resource id of the underlying compute :type resource_id: str :ivar provisioning_errors: Errors during provisioning :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] + list[~azure.mgmt.machinelearningservices.models.ErrorResponse] :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought from outside if true, or machine learning service provisioned it if false. 
:vartype is_attached_compute: bool + :param disable_local_auth: Opt-out of local authentication and ensure + customers can use only MSI and AAD exclusively for authentication. + :type disable_local_auth: bool :param compute_type: Required. Constant filled by server. :type compute_type: str :param properties: @@ -1672,8 +8730,9 @@ class VirtualMachine(Compute): 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, 'compute_type': {'key': 'computeType', 'type': 'str'}, 'properties': {'key': 'properties', 'type': 'VirtualMachineProperties'}, } @@ -1684,6 +8743,28 @@ def __init__(self, **kwargs): self.compute_type = 'VirtualMachine' +class VirtualMachineImage(Model): + """Virtual Machine image for Windows AML Compute. + + All required parameters must be populated in order to send to Azure. + + :param id: Required. Virtual Machine image path + :type id: str + """ + + _validation = { + 'id': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(VirtualMachineImage, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + + class VirtualMachineProperties(Model): """VirtualMachineProperties. @@ -1696,6 +8777,9 @@ class VirtualMachineProperties(Model): :param administrator_account: Admin credentials for virtual machine :type administrator_account: ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials + :param is_notebook_instance_compute: Indicates whether this compute will + be used for running notebooks. 
+ :type is_notebook_instance_compute: bool """ _attribute_map = { @@ -1703,6 +8787,7 @@ class VirtualMachineProperties(Model): 'ssh_port': {'key': 'sshPort', 'type': 'int'}, 'address': {'key': 'address', 'type': 'str'}, 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, + 'is_notebook_instance_compute': {'key': 'isNotebookInstanceCompute', 'type': 'bool'}, } def __init__(self, **kwargs): @@ -1711,6 +8796,7 @@ def __init__(self, **kwargs): self.ssh_port = kwargs.get('ssh_port', None) self.address = kwargs.get('address', None) self.administrator_account = kwargs.get('administrator_account', None) + self.is_notebook_instance_compute = kwargs.get('is_notebook_instance_compute', None) class VirtualMachineSecrets(ComputeSecrets): @@ -1755,6 +8841,9 @@ class VirtualMachineSize(Model): :ivar v_cp_us: Number of vPUs. The number of vCPUs supported by the virtual machine size. :vartype v_cp_us: int + :ivar gpus: Number of gPUs. The number of gPUs supported by the virtual + machine size. + :vartype gpus: int :ivar os_vhd_size_mb: OS VHD Disk size. The OS VHD disk size, in MB, allowed by the virtual machine size. :vartype os_vhd_size_mb: int @@ -1770,12 +8859,17 @@ class VirtualMachineSize(Model): :ivar premium_io: Premium IO supported. Specifies if the virtual machine size supports premium IO. :vartype premium_io: bool + :param estimated_vm_prices: Estimated VM prices. The estimated price + information for using a VM. 
+ :type estimated_vm_prices: + ~azure.mgmt.machinelearningservices.models.EstimatedVMPrices """ _validation = { 'name': {'readonly': True}, 'family': {'readonly': True}, 'v_cp_us': {'readonly': True}, + 'gpus': {'readonly': True}, 'os_vhd_size_mb': {'readonly': True}, 'max_resource_volume_mb': {'readonly': True}, 'memory_gb': {'readonly': True}, @@ -1787,11 +8881,13 @@ class VirtualMachineSize(Model): 'name': {'key': 'name', 'type': 'str'}, 'family': {'key': 'family', 'type': 'str'}, 'v_cp_us': {'key': 'vCPUs', 'type': 'int'}, + 'gpus': {'key': 'gpus', 'type': 'int'}, 'os_vhd_size_mb': {'key': 'osVhdSizeMB', 'type': 'int'}, 'max_resource_volume_mb': {'key': 'maxResourceVolumeMB', 'type': 'int'}, 'memory_gb': {'key': 'memoryGB', 'type': 'float'}, 'low_priority_capable': {'key': 'lowPriorityCapable', 'type': 'bool'}, 'premium_io': {'key': 'premiumIO', 'type': 'bool'}, + 'estimated_vm_prices': {'key': 'estimatedVMPrices', 'type': 'EstimatedVMPrices'}, } def __init__(self, **kwargs): @@ -1799,29 +8895,30 @@ def __init__(self, **kwargs): self.name = None self.family = None self.v_cp_us = None + self.gpus = None self.os_vhd_size_mb = None self.max_resource_volume_mb = None self.memory_gb = None self.low_priority_capable = None self.premium_io = None + self.estimated_vm_prices = kwargs.get('estimated_vm_prices', None) class VirtualMachineSizeListResult(Model): """The List Virtual Machine size operation response. - :param aml_compute: The list of virtual machine sizes supported by - AmlCompute. - :type aml_compute: + :param value: The list of virtual machine sizes supported by AmlCompute. 
+ :type value: list[~azure.mgmt.machinelearningservices.models.VirtualMachineSize] """ _attribute_map = { - 'aml_compute': {'key': 'amlCompute', 'type': '[VirtualMachineSize]'}, + 'value': {'key': 'value', 'type': '[VirtualMachineSize]'}, } def __init__(self, **kwargs): super(VirtualMachineSizeListResult, self).__init__(**kwargs) - self.aml_compute = kwargs.get('aml_compute', None) + self.value = kwargs.get('value', None) class VirtualMachineSshCredentials(Model): @@ -1858,18 +8955,14 @@ class Workspace(Resource): Variables are only populated by the server, and will be ignored when sending a request. - :ivar id: Specifies the resource ID. + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} :vartype id: str - :ivar name: Specifies the name of the resource. + :ivar name: The name of the resource :vartype name: str - :ivar identity: The identity of the resource. - :vartype identity: ~azure.mgmt.machinelearningservices.models.Identity - :param location: Specifies the location of the resource. - :type location: str - :ivar type: Specifies the type of the resource. + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" :vartype type: str - :param tags: Contains resource tags defined as key/value pairs. - :type tags: dict[str, str] :ivar workspace_id: The immutable id associated with this workspace. :vartype workspace_id: str :param description: The description of this workspace. @@ -1877,9 +8970,6 @@ class Workspace(Resource): :param friendly_name: The friendly name for this workspace. This name in mutable :type friendly_name: str - :ivar creation_time: The creation time of the machine learning workspace - in ISO8601 format. - :vartype creation_time: datetime :param key_vault: ARM id of the key vault associated with this workspace. 
This cannot be changed once the workspace has been created :type key_vault: str @@ -1903,35 +8993,98 @@ class Workspace(Resource): 'Deleting', 'Succeeded', 'Failed', 'Canceled' :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.ProvisioningState + :param encryption: The encryption settings of Azure ML workspace. + :type encryption: + ~azure.mgmt.machinelearningservices.models.EncryptionProperty + :param hbi_workspace: The flag to signal HBI data in the workspace and + reduce diagnostic data collected by the service. Default value: False . + :type hbi_workspace: bool + :ivar service_provisioned_resource_group: The name of the managed resource + group created by workspace RP in customer subscription if the workspace is + CMK workspace + :vartype service_provisioned_resource_group: str + :ivar private_link_count: Count of private connections in the workspace + :vartype private_link_count: int + :param image_build_compute: The compute name for image build + :type image_build_compute: str + :param allow_public_access_when_behind_vnet: The flag to indicate whether + to allow public access when behind VNet. Default value: False . + :type allow_public_access_when_behind_vnet: bool + :ivar private_endpoint_connections: The list of private endpoint + connections in the workspace. + :vartype private_endpoint_connections: + list[~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection] + :param shared_private_link_resources: The list of shared private link + resources in this workspace. + :type shared_private_link_resources: + list[~azure.mgmt.machinelearningservices.models.SharedPrivateLinkResource] + :ivar notebook_info: The notebook info of Azure ML workspace. + :vartype notebook_info: + ~azure.mgmt.machinelearningservices.models.NotebookResourceInfo + :param service_managed_resources_settings: The service managed resource + settings. 
+ :type service_managed_resources_settings: + ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings + :param primary_user_assigned_identity: The user assigned identity resource + id that represents the workspace identity. + :type primary_user_assigned_identity: str + :ivar tenant_id: The tenant id associated with this workspace. + :vartype tenant_id: str + :param identity: The identity of the resource. + :type identity: ~azure.mgmt.machinelearningservices.models.Identity + :param location: Specifies the location of the resource. + :type location: str + :param tags: Contains resource tags defined as key/value pairs. + :type tags: dict[str, str] + :param sku: The sku of the workspace. + :type sku: ~azure.mgmt.machinelearningservices.models.Sku + :param system_data: + :type system_data: ~azure.mgmt.machinelearningservices.models.SystemData """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, - 'identity': {'readonly': True}, 'type': {'readonly': True}, 'workspace_id': {'readonly': True}, - 'creation_time': {'readonly': True}, 'provisioning_state': {'readonly': True}, + 'service_provisioned_resource_group': {'readonly': True}, + 'private_link_count': {'readonly': True}, + 'private_endpoint_connections': {'readonly': True}, + 'notebook_info': {'readonly': True}, + 'tenant_id': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'Identity'}, - 'location': {'key': 'location', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, 'workspace_id': {'key': 'properties.workspaceId', 'type': 'str'}, 'description': {'key': 'properties.description', 'type': 'str'}, 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'}, - 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, 'key_vault': {'key': 'properties.keyVault', 'type': 'str'}, 'application_insights': 
{'key': 'properties.applicationInsights', 'type': 'str'}, 'container_registry': {'key': 'properties.containerRegistry', 'type': 'str'}, 'storage_account': {'key': 'properties.storageAccount', 'type': 'str'}, 'discovery_url': {'key': 'properties.discoveryUrl', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionProperty'}, + 'hbi_workspace': {'key': 'properties.hbiWorkspace', 'type': 'bool'}, + 'service_provisioned_resource_group': {'key': 'properties.serviceProvisionedResourceGroup', 'type': 'str'}, + 'private_link_count': {'key': 'properties.privateLinkCount', 'type': 'int'}, + 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'}, + 'allow_public_access_when_behind_vnet': {'key': 'properties.allowPublicAccessWhenBehindVnet', 'type': 'bool'}, + 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'}, + 'shared_private_link_resources': {'key': 'properties.sharedPrivateLinkResources', 'type': '[SharedPrivateLinkResource]'}, + 'notebook_info': {'key': 'properties.notebookInfo', 'type': 'NotebookResourceInfo'}, + 'service_managed_resources_settings': {'key': 'properties.serviceManagedResourcesSettings', 'type': 'ServiceManagedResourcesSettings'}, + 'primary_user_assigned_identity': {'key': 'properties.primaryUserAssignedIdentity', 'type': 'str'}, + 'tenant_id': {'key': 'properties.tenantId', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, } def __init__(self, **kwargs): @@ -1939,13 +9092,144 @@ def __init__(self, **kwargs): self.workspace_id = None self.description = kwargs.get('description', None) self.friendly_name = kwargs.get('friendly_name', None) - 
self.creation_time = None self.key_vault = kwargs.get('key_vault', None) self.application_insights = kwargs.get('application_insights', None) self.container_registry = kwargs.get('container_registry', None) self.storage_account = kwargs.get('storage_account', None) self.discovery_url = kwargs.get('discovery_url', None) self.provisioning_state = None + self.encryption = kwargs.get('encryption', None) + self.hbi_workspace = kwargs.get('hbi_workspace', False) + self.service_provisioned_resource_group = None + self.private_link_count = None + self.image_build_compute = kwargs.get('image_build_compute', None) + self.allow_public_access_when_behind_vnet = kwargs.get('allow_public_access_when_behind_vnet', False) + self.private_endpoint_connections = None + self.shared_private_link_resources = kwargs.get('shared_private_link_resources', None) + self.notebook_info = None + self.service_managed_resources_settings = kwargs.get('service_managed_resources_settings', None) + self.primary_user_assigned_identity = kwargs.get('primary_user_assigned_identity', None) + self.tenant_id = None + self.identity = kwargs.get('identity', None) + self.location = kwargs.get('location', None) + self.tags = kwargs.get('tags', None) + self.sku = kwargs.get('sku', None) + self.system_data = kwargs.get('system_data', None) + + +class WorkspaceConnection(Model): + """Workspace connection. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: ResourceId of the workspace connection. + :vartype id: str + :ivar name: Friendly name of the workspace connection. + :vartype name: str + :ivar type: Resource type of workspace connection. + :vartype type: str + :param category: Category of the workspace connection. + :type category: str + :param target: Target of the workspace connection. + :type target: str + :param auth_type: Authorization type of the workspace connection. + :type auth_type: str + :param value: Value details of the workspace connection. 
+ :type value: str + :param value_format: format for the workspace connection value. Possible + values include: 'JSON' + :type value_format: str or + ~azure.mgmt.machinelearningservices.models.ValueFormat + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'category': {'key': 'properties.category', 'type': 'str'}, + 'target': {'key': 'properties.target', 'type': 'str'}, + 'auth_type': {'key': 'properties.authType', 'type': 'str'}, + 'value': {'key': 'properties.value', 'type': 'str'}, + 'value_format': {'key': 'properties.valueFormat', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(WorkspaceConnection, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.category = kwargs.get('category', None) + self.target = kwargs.get('target', None) + self.auth_type = kwargs.get('auth_type', None) + self.value = kwargs.get('value', None) + self.value_format = kwargs.get('value_format', None) + + +class WorkspaceSku(Model): + """Describes Workspace Sku details and features. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar locations: The set of locations that the SKU is available. This will + be supported and registered Azure Geo Regions (e.g. West US, East US, + Southeast Asia, etc.). + :vartype locations: list[str] + :ivar location_info: A list of locations and availability zones in those + locations where the SKU is available. 
+ :vartype location_info: + list[~azure.mgmt.machinelearningservices.models.ResourceSkuLocationInfo] + :ivar tier: Sku Tier like Basic or Enterprise + :vartype tier: str + :ivar resource_type: + :vartype resource_type: str + :ivar name: + :vartype name: str + :ivar capabilities: List of features/user capabilities associated with the + sku + :vartype capabilities: + list[~azure.mgmt.machinelearningservices.models.SKUCapability] + :param restrictions: The restrictions because of which SKU cannot be used. + This is empty if there are no restrictions. + :type restrictions: + list[~azure.mgmt.machinelearningservices.models.Restriction] + """ + + _validation = { + 'locations': {'readonly': True}, + 'location_info': {'readonly': True}, + 'tier': {'readonly': True}, + 'resource_type': {'readonly': True}, + 'name': {'readonly': True}, + 'capabilities': {'readonly': True}, + } + + _attribute_map = { + 'locations': {'key': 'locations', 'type': '[str]'}, + 'location_info': {'key': 'locationInfo', 'type': '[ResourceSkuLocationInfo]'}, + 'tier': {'key': 'tier', 'type': 'str'}, + 'resource_type': {'key': 'resourceType', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'capabilities': {'key': 'capabilities', 'type': '[SKUCapability]'}, + 'restrictions': {'key': 'restrictions', 'type': '[Restriction]'}, + } + + def __init__(self, **kwargs): + super(WorkspaceSku, self).__init__(**kwargs) + self.locations = None + self.location_info = None + self.tier = None + self.resource_type = None + self.name = None + self.capabilities = None + self.restrictions = kwargs.get('restrictions', None) class WorkspaceUpdateParameters(Model): @@ -1953,20 +9237,43 @@ class WorkspaceUpdateParameters(Model): :param tags: The resource tags for the machine learning workspace. :type tags: dict[str, str] + :param sku: The sku of the workspace. + :type sku: ~azure.mgmt.machinelearningservices.models.Sku + :param identity: The identity of the resource. 
+ :type identity: ~azure.mgmt.machinelearningservices.models.Identity :param description: The description of this workspace. :type description: str :param friendly_name: The friendly name for this workspace. :type friendly_name: str + :param image_build_compute: The compute name for image build + :type image_build_compute: str + :param service_managed_resources_settings: The service managed resource + settings. + :type service_managed_resources_settings: + ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings + :param primary_user_assigned_identity: The user assigned identity resource + id that represents the workspace identity. + :type primary_user_assigned_identity: str """ _attribute_map = { 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, 'description': {'key': 'properties.description', 'type': 'str'}, 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'}, + 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'}, + 'service_managed_resources_settings': {'key': 'properties.serviceManagedResourcesSettings', 'type': 'ServiceManagedResourcesSettings'}, + 'primary_user_assigned_identity': {'key': 'properties.primaryUserAssignedIdentity', 'type': 'str'}, } def __init__(self, **kwargs): super(WorkspaceUpdateParameters, self).__init__(**kwargs) self.tags = kwargs.get('tags', None) + self.sku = kwargs.get('sku', None) + self.identity = kwargs.get('identity', None) self.description = kwargs.get('description', None) self.friendly_name = kwargs.get('friendly_name', None) + self.image_build_compute = kwargs.get('image_build_compute', None) + self.service_managed_resources_settings = kwargs.get('service_managed_resources_settings', None) + self.primary_user_assigned_identity = kwargs.get('primary_user_assigned_identity', None) diff --git 
a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models_py3.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models_py3.py index f173a48382ad..cebf4952fe54 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models_py3.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models_py3.py @@ -13,12 +13,128 @@ from msrest.exceptions import HttpOperationError +class DatastoreCredentials(Model): + """Base definition for datastore credentials. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AccountKeyDatastoreCredentials, + CertificateDatastoreCredentials, NoneDatastoreCredentials, + SasDatastoreCredentials, ServicePrincipalDatastoreCredentials, + SqlAdminDatastoreCredentials + + All required parameters must be populated in order to send to Azure. + + :param credentials_type: Required. Constant filled by server. + :type credentials_type: str + """ + + _validation = { + 'credentials_type': {'required': True}, + } + + _attribute_map = { + 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, + } + + _subtype_map = { + 'credentials_type': {'AccountKey': 'AccountKeyDatastoreCredentials', 'Certificate': 'CertificateDatastoreCredentials', 'None': 'NoneDatastoreCredentials', 'Sas': 'SasDatastoreCredentials', 'ServicePrincipal': 'ServicePrincipalDatastoreCredentials', 'SqlAdmin': 'SqlAdminDatastoreCredentials'} + } + + def __init__(self, **kwargs) -> None: + super(DatastoreCredentials, self).__init__(**kwargs) + self.credentials_type = None + + +class AccountKeyDatastoreCredentials(DatastoreCredentials): + """Account key datastore credentials configuration. + + All required parameters must be populated in order to send to Azure. + + :param credentials_type: Required. Constant filled by server. 
+ :type credentials_type: str + :param secrets: Storage account secrets. + :type secrets: + ~azure.mgmt.machinelearningservices.models.AccountKeyDatastoreSecrets + """ + + _validation = { + 'credentials_type': {'required': True}, + } + + _attribute_map = { + 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, + 'secrets': {'key': 'secrets', 'type': 'AccountKeyDatastoreSecrets'}, + } + + def __init__(self, *, secrets=None, **kwargs) -> None: + super(AccountKeyDatastoreCredentials, self).__init__(**kwargs) + self.secrets = secrets + self.credentials_type = 'AccountKey' + + +class DatastoreSecrets(Model): + """Base definition for datastore secrets. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AccountKeyDatastoreSecrets, CertificateDatastoreSecrets, + NoneDatastoreSecrets, SasDatastoreSecrets, + ServicePrincipalDatastoreSecrets, SqlAdminDatastoreSecrets + + All required parameters must be populated in order to send to Azure. + + :param secrets_type: Required. Constant filled by server. + :type secrets_type: str + """ + + _validation = { + 'secrets_type': {'required': True}, + } + + _attribute_map = { + 'secrets_type': {'key': 'secretsType', 'type': 'str'}, + } + + _subtype_map = { + 'secrets_type': {'AccountKey': 'AccountKeyDatastoreSecrets', 'Certificate': 'CertificateDatastoreSecrets', 'None': 'NoneDatastoreSecrets', 'Sas': 'SasDatastoreSecrets', 'ServicePrincipal': 'ServicePrincipalDatastoreSecrets', 'SqlAdmin': 'SqlAdminDatastoreSecrets'} + } + + def __init__(self, **kwargs) -> None: + super(DatastoreSecrets, self).__init__(**kwargs) + self.secrets_type = None + + +class AccountKeyDatastoreSecrets(DatastoreSecrets): + """Datastore account key secrets. + + All required parameters must be populated in order to send to Azure. + + :param secrets_type: Required. Constant filled by server. + :type secrets_type: str + :param key: Storage account key. 
+ :type key: str + """ + + _validation = { + 'secrets_type': {'required': True}, + } + + _attribute_map = { + 'secrets_type': {'key': 'secretsType', 'type': 'str'}, + 'key': {'key': 'key', 'type': 'str'}, + } + + def __init__(self, *, key: str=None, **kwargs) -> None: + super(AccountKeyDatastoreSecrets, self).__init__(**kwargs) + self.key = key + self.secrets_type = 'AccountKey' + + class Compute(Model): """Machine Learning compute object. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AKS, AmlCompute, VirtualMachine, HDInsight, DataFactory, - Databricks, DataLakeAnalytics + sub-classes are: AKS, AmlCompute, ComputeInstance, VirtualMachine, + HDInsight, DataFactory, Databricks, DataLakeAnalytics Variables are only populated by the server, and will be ignored when sending a request. @@ -35,19 +151,22 @@ class Compute(Model): ~azure.mgmt.machinelearningservices.models.ProvisioningState :param description: The description of the Machine Learning compute. :type description: str - :ivar created_on: The date and time when the compute was created. + :ivar created_on: The time at which the compute was created. :vartype created_on: datetime - :ivar modified_on: The date and time when the compute was last modified. + :ivar modified_on: The time at which the compute was last modified. :vartype modified_on: datetime :param resource_id: ARM resource id of the underlying compute :type resource_id: str :ivar provisioning_errors: Errors during provisioning :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] + list[~azure.mgmt.machinelearningservices.models.ErrorResponse] :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought from outside if true, or machine learning service provisioned it if false. 
:vartype is_attached_compute: bool + :param disable_local_auth: Opt-out of local authentication and ensure + customers can use only MSI and AAD exclusively for authentication. + :type disable_local_auth: bool :param compute_type: Required. Constant filled by server. :type compute_type: str """ @@ -68,16 +187,17 @@ class Compute(Model): 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, 'compute_type': {'key': 'computeType', 'type': 'str'}, } _subtype_map = { - 'compute_type': {'AKS': 'AKS', 'AmlCompute': 'AmlCompute', 'VirtualMachine': 'VirtualMachine', 'HDInsight': 'HDInsight', 'DataFactory': 'DataFactory', 'Databricks': 'Databricks', 'DataLakeAnalytics': 'DataLakeAnalytics'} + 'compute_type': {'AKS': 'AKS', 'AmlCompute': 'AmlCompute', 'ComputeInstance': 'ComputeInstance', 'VirtualMachine': 'VirtualMachine', 'HDInsight': 'HDInsight', 'DataFactory': 'DataFactory', 'Databricks': 'Databricks', 'DataLakeAnalytics': 'DataLakeAnalytics'} } - def __init__(self, *, compute_location: str=None, description: str=None, resource_id: str=None, **kwargs) -> None: + def __init__(self, *, compute_location: str=None, description: str=None, resource_id: str=None, disable_local_auth: bool=None, **kwargs) -> None: super(Compute, self).__init__(**kwargs) self.compute_location = compute_location self.provisioning_state = None @@ -87,6 +207,7 @@ def __init__(self, *, compute_location: str=None, description: str=None, resourc self.resource_id = resource_id self.provisioning_errors = None self.is_attached_compute = None + self.disable_local_auth = 
disable_local_auth self.compute_type = None @@ -108,19 +229,22 @@ class AKS(Compute): ~azure.mgmt.machinelearningservices.models.ProvisioningState :param description: The description of the Machine Learning compute. :type description: str - :ivar created_on: The date and time when the compute was created. + :ivar created_on: The time at which the compute was created. :vartype created_on: datetime - :ivar modified_on: The date and time when the compute was last modified. + :ivar modified_on: The time at which the compute was last modified. :vartype modified_on: datetime :param resource_id: ARM resource id of the underlying compute :type resource_id: str :ivar provisioning_errors: Errors during provisioning :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] + list[~azure.mgmt.machinelearningservices.models.ErrorResponse] :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought from outside if true, or machine learning service provisioned it if false. :vartype is_attached_compute: bool + :param disable_local_auth: Opt-out of local authentication and ensure + customers can use only MSI and AAD exclusively for authentication. + :type disable_local_auth: bool :param compute_type: Required. Constant filled by server. 
:type compute_type: str :param properties: AKS properties @@ -143,14 +267,15 @@ class AKS(Compute): 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, 'compute_type': {'key': 'computeType', 'type': 'str'}, 'properties': {'key': 'properties', 'type': 'AKSProperties'}, } - def __init__(self, *, compute_location: str=None, description: str=None, resource_id: str=None, properties=None, **kwargs) -> None: - super(AKS, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) + def __init__(self, *, compute_location: str=None, description: str=None, resource_id: str=None, disable_local_auth: bool=None, properties=None, **kwargs) -> None: + super(AKS, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs) self.properties = properties self.compute_type = 'AKS' @@ -277,36 +402,52 @@ class AKSProperties(Model): :type agent_count: int :param agent_vm_size: Agent virtual machine size :type agent_vm_size: str + :param cluster_purpose: Intended usage of the cluster. Possible values + include: 'FastProd', 'DenseProd', 'DevTest'. Default value: "FastProd" . 
+ :type cluster_purpose: str or + ~azure.mgmt.machinelearningservices.models.ClusterPurpose :param ssl_configuration: SSL configuration :type ssl_configuration: ~azure.mgmt.machinelearningservices.models.SslConfiguration :param aks_networking_configuration: AKS networking configuration for vnet :type aks_networking_configuration: ~azure.mgmt.machinelearningservices.models.AksNetworkingConfiguration + :param load_balancer_type: Load Balancer Type. Possible values include: + 'PublicIp', 'InternalLoadBalancer'. Default value: "PublicIp" . + :type load_balancer_type: str or + ~azure.mgmt.machinelearningservices.models.LoadBalancerType + :param load_balancer_subnet: Load Balancer Subnet + :type load_balancer_subnet: str """ _validation = { 'system_services': {'readonly': True}, - 'agent_count': {'minimum': 1}, + 'agent_count': {'minimum': 0}, } _attribute_map = { 'cluster_fqdn': {'key': 'clusterFqdn', 'type': 'str'}, 'system_services': {'key': 'systemServices', 'type': '[SystemService]'}, 'agent_count': {'key': 'agentCount', 'type': 'int'}, - 'agent_vm_size': {'key': 'agentVMSize', 'type': 'str'}, + 'agent_vm_size': {'key': 'agentVmSize', 'type': 'str'}, + 'cluster_purpose': {'key': 'clusterPurpose', 'type': 'str'}, 'ssl_configuration': {'key': 'sslConfiguration', 'type': 'SslConfiguration'}, 'aks_networking_configuration': {'key': 'aksNetworkingConfiguration', 'type': 'AksNetworkingConfiguration'}, + 'load_balancer_type': {'key': 'loadBalancerType', 'type': 'str'}, + 'load_balancer_subnet': {'key': 'loadBalancerSubnet', 'type': 'str'}, } - def __init__(self, *, cluster_fqdn: str=None, agent_count: int=None, agent_vm_size: str=None, ssl_configuration=None, aks_networking_configuration=None, **kwargs) -> None: + def __init__(self, *, cluster_fqdn: str=None, agent_count: int=None, agent_vm_size: str=None, cluster_purpose="FastProd", ssl_configuration=None, aks_networking_configuration=None, load_balancer_type="PublicIp", load_balancer_subnet: str=None, **kwargs) -> None: 
super(AKSProperties, self).__init__(**kwargs) self.cluster_fqdn = cluster_fqdn self.system_services = None self.agent_count = agent_count self.agent_vm_size = agent_vm_size + self.cluster_purpose = cluster_purpose self.ssl_configuration = ssl_configuration self.aks_networking_configuration = aks_networking_configuration + self.load_balancer_type = load_balancer_type + self.load_balancer_subnet = load_balancer_subnet class AmlCompute(Compute): @@ -327,19 +468,22 @@ class AmlCompute(Compute): ~azure.mgmt.machinelearningservices.models.ProvisioningState :param description: The description of the Machine Learning compute. :type description: str - :ivar created_on: The date and time when the compute was created. + :ivar created_on: The time at which the compute was created. :vartype created_on: datetime - :ivar modified_on: The date and time when the compute was last modified. + :ivar modified_on: The time at which the compute was last modified. :vartype modified_on: datetime :param resource_id: ARM resource id of the underlying compute :type resource_id: str :ivar provisioning_errors: Errors during provisioning :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] + list[~azure.mgmt.machinelearningservices.models.ErrorResponse] :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought from outside if true, or machine learning service provisioned it if false. :vartype is_attached_compute: bool + :param disable_local_auth: Opt-out of local authentication and ensure + customers can use only MSI and AAD exclusively for authentication. + :type disable_local_auth: bool :param compute_type: Required. Constant filled by server. 
:type compute_type: str :param properties: AML Compute properties @@ -363,14 +507,15 @@ class AmlCompute(Compute): 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, 'compute_type': {'key': 'computeType', 'type': 'str'}, 'properties': {'key': 'properties', 'type': 'AmlComputeProperties'}, } - def __init__(self, *, compute_location: str=None, description: str=None, resource_id: str=None, properties=None, **kwargs) -> None: - super(AmlCompute, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) + def __init__(self, *, compute_location: str=None, description: str=None, resource_id: str=None, disable_local_auth: bool=None, properties=None, **kwargs) -> None: + super(AmlCompute, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs) self.properties = properties self.compute_type = 'AmlCompute' @@ -383,102 +528,50 @@ class AmlComputeNodeInformation(Model): :ivar node_id: Node ID. ID of the compute node. :vartype node_id: str - :ivar ip_address: IP address. Public IP address of the compute node. - :vartype ip_address: str + :ivar private_ip_address: Private IP address. Private IP address of the + compute node. + :vartype private_ip_address: str + :ivar public_ip_address: Public IP address. Public IP address of the + compute node. + :vartype public_ip_address: str :ivar port: Port. SSH port number of the node. :vartype port: float + :ivar node_state: State of the compute node. 
Values are idle, running, + preparing, unusable, leaving and preempted. Possible values include: + 'idle', 'running', 'preparing', 'unusable', 'leaving', 'preempted' + :vartype node_state: str or + ~azure.mgmt.machinelearningservices.models.NodeState + :ivar run_id: Run ID. ID of the Experiment running on the node, if any + else null. + :vartype run_id: str """ _validation = { 'node_id': {'readonly': True}, - 'ip_address': {'readonly': True}, + 'private_ip_address': {'readonly': True}, + 'public_ip_address': {'readonly': True}, 'port': {'readonly': True}, + 'node_state': {'readonly': True}, + 'run_id': {'readonly': True}, } _attribute_map = { 'node_id': {'key': 'nodeId', 'type': 'str'}, - 'ip_address': {'key': 'ipAddress', 'type': 'str'}, + 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'}, + 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, 'port': {'key': 'port', 'type': 'float'}, + 'node_state': {'key': 'nodeState', 'type': 'str'}, + 'run_id': {'key': 'runId', 'type': 'str'}, } def __init__(self, **kwargs) -> None: super(AmlComputeNodeInformation, self).__init__(**kwargs) self.node_id = None - self.ip_address = None + self.private_ip_address = None + self.public_ip_address = None self.port = None - - -class ComputeNodesInformation(Model): - """Compute nodes information related to a Machine Learning compute. Might - differ for every type of compute. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmlComputeNodesInformation - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar next_link: The continuation token. - :vartype next_link: str - :param compute_type: Required. Constant filled by server. 
- :type compute_type: str - """ - - _validation = { - 'next_link': {'readonly': True}, - 'compute_type': {'required': True}, - } - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - } - - _subtype_map = { - 'compute_type': {'AmlCompute': 'AmlComputeNodesInformation'} - } - - def __init__(self, **kwargs) -> None: - super(ComputeNodesInformation, self).__init__(**kwargs) - self.next_link = None - self.compute_type = None - - -class AmlComputeNodesInformation(ComputeNodesInformation): - """Compute node information related to a AmlCompute. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar next_link: The continuation token. - :vartype next_link: str - :param compute_type: Required. Constant filled by server. - :type compute_type: str - :ivar nodes: The collection of returned AmlCompute nodes details. - :vartype nodes: - list[~azure.mgmt.machinelearningservices.models.AmlComputeNodeInformation] - """ - - _validation = { - 'next_link': {'readonly': True}, - 'compute_type': {'required': True}, - 'nodes': {'readonly': True}, - } - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'nodes': {'key': 'nodes', 'type': '[AmlComputeNodeInformation]'}, - } - - def __init__(self, **kwargs) -> None: - super(AmlComputeNodesInformation, self).__init__(**kwargs) - self.nodes = None - self.compute_type = 'AmlCompute' + self.node_state = None + self.run_id = None class AmlComputeProperties(Model): @@ -487,12 +580,21 @@ class AmlComputeProperties(Model): Variables are only populated by the server, and will be ignored when sending a request. + :param os_type: Compute OS Type. Possible values include: 'Linux', + 'Windows'. Default value: "Linux" . 
+ :type os_type: str or ~azure.mgmt.machinelearningservices.models.OsType :param vm_size: Virtual Machine Size :type vm_size: str :param vm_priority: Virtual Machine priority. Possible values include: 'Dedicated', 'LowPriority' :type vm_priority: str or ~azure.mgmt.machinelearningservices.models.VmPriority + :param virtual_machine_image: Virtual Machine image for AML Compute - + windows only + :type virtual_machine_image: + ~azure.mgmt.machinelearningservices.models.VirtualMachineImage + :param isolated_network: Network is isolated or not + :type isolated_network: bool :param scale_settings: Scale settings for AML Compute :type scale_settings: ~azure.mgmt.machinelearningservices.models.ScaleSettings @@ -503,6 +605,17 @@ class AmlComputeProperties(Model): :param subnet: Subnet. Virtual network subnet resource ID the compute nodes belong to. :type subnet: ~azure.mgmt.machinelearningservices.models.ResourceId + :param remote_login_port_public_access: Close remote Login Access Port. + State of the public SSH port. Possible values are: Disabled - Indicates + that the public ssh port is closed on all nodes of the cluster. Enabled - + Indicates that the public ssh port is open on all nodes of the cluster. + NotSpecified - Indicates that the public ssh port is closed on all nodes + of the cluster if VNet is defined, else is open all public nodes. It can + be default only during cluster creation time, after creation it will be + either enabled or disabled. Possible values include: 'Enabled', + 'Disabled', 'NotSpecified'. Default value: "NotSpecified" . + :type remote_login_port_public_access: str or + ~azure.mgmt.machinelearningservices.models.RemoteLoginPortPublicAccess :ivar allocation_state: Allocation state. Allocation state of the compute. Possible values are: steady - Indicates that the compute is not resizing. There are no changes to the number of compute nodes in the compute in @@ -519,7 +632,7 @@ class AmlComputeProperties(Model): :ivar errors: Errors. 
Collection of errors encountered by various compute nodes during node setup. :vartype errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] + list[~azure.mgmt.machinelearningservices.models.ErrorResponse] :ivar current_node_count: Current node count. The number of compute nodes currently assigned to the compute. :vartype current_node_count: int @@ -533,6 +646,12 @@ class AmlComputeProperties(Model): on the compute. :vartype node_state_counts: ~azure.mgmt.machinelearningservices.models.NodeStateCounts + :param enable_node_public_ip: Enable node public IP. Enable or disable + node public IP address provisioning. Possible values are: Possible values + are: true - Indicates that the compute nodes will have public IPs + provisioned. false - Indicates that the compute nodes will have a private + endpoint and no public IPs. Default value: True . + :type enable_node_public_ip: bool """ _validation = { @@ -545,966 +664,7840 @@ class AmlComputeProperties(Model): } _attribute_map = { + 'os_type': {'key': 'osType', 'type': 'str'}, 'vm_size': {'key': 'vmSize', 'type': 'str'}, 'vm_priority': {'key': 'vmPriority', 'type': 'str'}, + 'virtual_machine_image': {'key': 'virtualMachineImage', 'type': 'VirtualMachineImage'}, + 'isolated_network': {'key': 'isolatedNetwork', 'type': 'bool'}, 'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'}, 'user_account_credentials': {'key': 'userAccountCredentials', 'type': 'UserAccountCredentials'}, 'subnet': {'key': 'subnet', 'type': 'ResourceId'}, + 'remote_login_port_public_access': {'key': 'remoteLoginPortPublicAccess', 'type': 'str'}, 'allocation_state': {'key': 'allocationState', 'type': 'str'}, 'allocation_state_transition_time': {'key': 'allocationStateTransitionTime', 'type': 'iso-8601'}, - 'errors': {'key': 'errors', 'type': '[MachineLearningServiceError]'}, + 'errors': {'key': 'errors', 'type': '[ErrorResponse]'}, 'current_node_count': {'key': 'currentNodeCount', 'type': 'int'}, 
'target_node_count': {'key': 'targetNodeCount', 'type': 'int'}, 'node_state_counts': {'key': 'nodeStateCounts', 'type': 'NodeStateCounts'}, + 'enable_node_public_ip': {'key': 'enableNodePublicIp', 'type': 'bool'}, } - def __init__(self, *, vm_size: str=None, vm_priority=None, scale_settings=None, user_account_credentials=None, subnet=None, **kwargs) -> None: + def __init__(self, *, os_type="Linux", vm_size: str=None, vm_priority=None, virtual_machine_image=None, isolated_network: bool=None, scale_settings=None, user_account_credentials=None, subnet=None, remote_login_port_public_access="NotSpecified", enable_node_public_ip: bool=True, **kwargs) -> None: super(AmlComputeProperties, self).__init__(**kwargs) + self.os_type = os_type self.vm_size = vm_size self.vm_priority = vm_priority + self.virtual_machine_image = virtual_machine_image + self.isolated_network = isolated_network self.scale_settings = scale_settings self.user_account_credentials = user_account_credentials self.subnet = subnet + self.remote_login_port_public_access = remote_login_port_public_access self.allocation_state = None self.allocation_state_transition_time = None self.errors = None self.current_node_count = None self.target_node_count = None self.node_state_counts = None + self.enable_node_public_ip = enable_node_public_ip -class CloudError(Model): - """CloudError. +class IdentityConfiguration(Model): + """Base definition for identity configuration. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AmlToken, ManagedIdentity + + All required parameters must be populated in order to send to Azure. + + :param identity_type: Required. Constant filled by server. 
+ :type identity_type: str """ + _validation = { + 'identity_type': {'required': True}, + } + _attribute_map = { + 'identity_type': {'key': 'identityType', 'type': 'str'}, } + _subtype_map = { + 'identity_type': {'AMLToken': 'AmlToken', 'Managed': 'ManagedIdentity'} + } -class ClusterUpdateParameters(Model): - """AmlCompute update parameters. + def __init__(self, **kwargs) -> None: + super(IdentityConfiguration, self).__init__(**kwargs) + self.identity_type = None - :param scale_settings: Scale settings. Desired scale settings for the - amlCompute. - :type scale_settings: - ~azure.mgmt.machinelearningservices.models.ScaleSettings + +class AmlToken(IdentityConfiguration): + """AML Token identity configuration. + + All required parameters must be populated in order to send to Azure. + + :param identity_type: Required. Constant filled by server. + :type identity_type: str + """ + + _validation = { + 'identity_type': {'required': True}, + } + + _attribute_map = { + 'identity_type': {'key': 'identityType', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(AmlToken, self).__init__(**kwargs) + self.identity_type = 'AMLToken' + + +class AmlUserFeature(Model): + """Features enabled for a workspace. 
+ + :param id: Specifies the feature ID + :type id: str + :param display_name: Specifies the feature name + :type display_name: str + :param description: Describes the feature for user experience + :type description: str """ _attribute_map = { - 'scale_settings': {'key': 'properties.scaleSettings', 'type': 'ScaleSettings'}, + 'id': {'key': 'id', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, } - def __init__(self, *, scale_settings=None, **kwargs) -> None: - super(ClusterUpdateParameters, self).__init__(**kwargs) - self.scale_settings = scale_settings + def __init__(self, *, id: str=None, display_name: str=None, description: str=None, **kwargs) -> None: + super(AmlUserFeature, self).__init__(**kwargs) + self.id = id + self.display_name = display_name + self.description = description -class Resource(Model): - """Azure Resource Manager resource envelope. +class AssetReferenceBase(Model): + """Base definition for asset references. - Variables are only populated by the server, and will be ignored when - sending a request. + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: DataPathAssetReference, IdAssetReference, + OutputPathAssetReference - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar name: Specifies the name of the resource. - :vartype name: str - :ivar identity: The identity of the resource. - :vartype identity: ~azure.mgmt.machinelearningservices.models.Identity - :param location: Specifies the location of the resource. - :type location: str - :ivar type: Specifies the type of the resource. - :vartype type: str - :param tags: Contains resource tags defined as key/value pairs. - :type tags: dict[str, str] + All required parameters must be populated in order to send to Azure. + + :param reference_type: Required. Constant filled by server. 
+ :type reference_type: str """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'identity': {'readonly': True}, - 'type': {'readonly': True}, + 'reference_type': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'Identity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, + 'reference_type': {'key': 'referenceType', 'type': 'str'}, } - def __init__(self, *, location: str=None, tags=None, **kwargs) -> None: - super(Resource, self).__init__(**kwargs) - self.id = None - self.name = None - self.identity = None - self.location = location - self.type = None - self.tags = tags + _subtype_map = { + 'reference_type': {'DataPath': 'DataPathAssetReference', 'Id': 'IdAssetReference', 'OutputPath': 'OutputPathAssetReference'} + } + def __init__(self, **kwargs) -> None: + super(AssetReferenceBase, self).__init__(**kwargs) + self.reference_type = None -class ComputeResource(Resource): - """Machine Learning compute object wrapped into ARM resource envelope. - Variables are only populated by the server, and will be ignored when - sending a request. +class AssignedUser(Model): + """A user that can be assigned to a compute instance. - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar name: Specifies the name of the resource. - :vartype name: str - :ivar identity: The identity of the resource. - :vartype identity: ~azure.mgmt.machinelearningservices.models.Identity - :param location: Specifies the location of the resource. - :type location: str - :ivar type: Specifies the type of the resource. - :vartype type: str - :param tags: Contains resource tags defined as key/value pairs. 
- :type tags: dict[str, str] - :param properties: Compute properties - :type properties: ~azure.mgmt.machinelearningservices.models.Compute + All required parameters must be populated in order to send to Azure. + + :param object_id: Required. User’s AAD Object Id. + :type object_id: str + :param tenant_id: Required. User’s AAD Tenant Id. + :type tenant_id: str """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'identity': {'readonly': True}, - 'type': {'readonly': True}, + 'object_id': {'required': True}, + 'tenant_id': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'Identity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'properties': {'key': 'properties', 'type': 'Compute'}, + 'object_id': {'key': 'objectId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, } - def __init__(self, *, location: str=None, tags=None, properties=None, **kwargs) -> None: - super(ComputeResource, self).__init__(location=location, tags=tags, **kwargs) - self.properties = properties + def __init__(self, *, object_id: str, tenant_id: str, **kwargs) -> None: + super(AssignedUser, self).__init__(**kwargs) + self.object_id = object_id + self.tenant_id = tenant_id -class Databricks(Compute): - """A DataFactory compute. +class AutoPauseProperties(Model): + """Auto pause properties. - Variables are only populated by the server, and will be ignored when - sending a request. + :param delay_in_minutes: + :type delay_in_minutes: int + :param enabled: + :type enabled: bool + """ - All required parameters must be populated in order to send to Azure. 
+ _attribute_map = { + 'delay_in_minutes': {'key': 'delayInMinutes', 'type': 'int'}, + 'enabled': {'key': 'enabled', 'type': 'bool'}, + } - :param compute_location: Location for the underlying compute - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values - are Unknown, Updating, Provisioning, Succeeded, and Failed. Possible - values include: 'Unknown', 'Updating', 'Creating', 'Deleting', - 'Succeeded', 'Failed', 'Canceled' - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: datetime - :param resource_id: ARM resource id of the underlying compute - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned - by user and brought from outside if true, or machine learning service - provisioned it if false. - :vartype is_attached_compute: bool - :param compute_type: Required. Constant filled by server. 
- :type compute_type: str - :param properties: - :type properties: - ~azure.mgmt.machinelearningservices.models.DatabricksProperties - """ + def __init__(self, *, delay_in_minutes: int=None, enabled: bool=None, **kwargs) -> None: + super(AutoPauseProperties, self).__init__(**kwargs) + self.delay_in_minutes = delay_in_minutes + self.enabled = enabled - _validation = { - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - 'compute_type': {'required': True}, - } + +class AutoScaleProperties(Model): + """Auto scale properties. + + :param min_node_count: + :type min_node_count: int + :param enabled: + :type enabled: bool + :param max_node_count: + :type max_node_count: int + """ _attribute_map = { - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'DatabricksProperties'}, + 'min_node_count': {'key': 'minNodeCount', 'type': 'int'}, + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'}, } - def __init__(self, *, compute_location: str=None, description: str=None, resource_id: str=None, properties=None, **kwargs) -> None: - super(Databricks, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) - self.properties = properties - 
self.compute_type = 'Databricks' + def __init__(self, *, min_node_count: int=None, enabled: bool=None, max_node_count: int=None, **kwargs) -> None: + super(AutoScaleProperties, self).__init__(**kwargs) + self.min_node_count = min_node_count + self.enabled = enabled + self.max_node_count = max_node_count -class DatabricksComputeSecrets(ComputeSecrets): - """Secrets related to a Machine Learning compute based on Databricks. +class OnlineScaleSettings(Model): + """Online deployment scaling configuration. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AutoScaleSettings, ManualScaleSettings All required parameters must be populated in order to send to Azure. - :param compute_type: Required. Constant filled by server. - :type compute_type: str - :param databricks_access_token: access token for databricks account. - :type databricks_access_token: str + :param max_instances: Maximum number of instances for this deployment. + :type max_instances: int + :param min_instances: Minimum number of instances for this deployment. + :type min_instances: int + :param scale_type: Required. Constant filled by server. 
+ :type scale_type: str """ _validation = { - 'compute_type': {'required': True}, + 'scale_type': {'required': True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, + 'max_instances': {'key': 'maxInstances', 'type': 'int'}, + 'min_instances': {'key': 'minInstances', 'type': 'int'}, + 'scale_type': {'key': 'scaleType', 'type': 'str'}, } - def __init__(self, *, databricks_access_token: str=None, **kwargs) -> None: - super(DatabricksComputeSecrets, self).__init__(**kwargs) - self.databricks_access_token = databricks_access_token - self.compute_type = 'Databricks' + _subtype_map = { + 'scale_type': {'Auto': 'AutoScaleSettings', 'Manual': 'ManualScaleSettings'} + } + def __init__(self, *, max_instances: int=None, min_instances: int=None, **kwargs) -> None: + super(OnlineScaleSettings, self).__init__(**kwargs) + self.max_instances = max_instances + self.min_instances = min_instances + self.scale_type = None -class DatabricksProperties(Model): - """DatabricksProperties. - :param databricks_access_token: Databricks access token - :type databricks_access_token: str +class AutoScaleSettings(OnlineScaleSettings): + """AutoScaleSettings. + + All required parameters must be populated in order to send to Azure. + + :param max_instances: Maximum number of instances for this deployment. + :type max_instances: int + :param min_instances: Minimum number of instances for this deployment. + :type min_instances: int + :param scale_type: Required. Constant filled by server. + :type scale_type: str + :param polling_interval: The polling interval in ISO 8691 format. Only + supports duration with precision as low as Seconds. + :type polling_interval: timedelta + :param target_utilization_percentage: Target CPU usage for the autoscaler. 
+ :type target_utilization_percentage: int """ + _validation = { + 'scale_type': {'required': True}, + } + _attribute_map = { - 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, + 'max_instances': {'key': 'maxInstances', 'type': 'int'}, + 'min_instances': {'key': 'minInstances', 'type': 'int'}, + 'scale_type': {'key': 'scaleType', 'type': 'str'}, + 'polling_interval': {'key': 'pollingInterval', 'type': 'duration'}, + 'target_utilization_percentage': {'key': 'targetUtilizationPercentage', 'type': 'int'}, } - def __init__(self, *, databricks_access_token: str=None, **kwargs) -> None: - super(DatabricksProperties, self).__init__(**kwargs) - self.databricks_access_token = databricks_access_token + def __init__(self, *, max_instances: int=None, min_instances: int=None, polling_interval=None, target_utilization_percentage: int=None, **kwargs) -> None: + super(AutoScaleSettings, self).__init__(max_instances=max_instances, min_instances=min_instances, **kwargs) + self.polling_interval = polling_interval + self.target_utilization_percentage = target_utilization_percentage + self.scale_type = 'Auto' -class DataFactory(Compute): - """A DataFactory compute. +class DatastoreContents(Model): + """Base definition for datastore contents configuration. - Variables are only populated by the server, and will be ignored when - sending a request. + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureBlobContents, AzureDataLakeGen1Contents, + AzureDataLakeGen2Contents, AzureFileContents, AzurePostgreSqlContents, + AzureSqlDatabaseContents, GlusterFsContents All required parameters must be populated in order to send to Azure. - :param compute_location: Location for the underlying compute - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values - are Unknown, Updating, Provisioning, Succeeded, and Failed. 
Possible - values include: 'Unknown', 'Updating', 'Creating', 'Deleting', - 'Succeeded', 'Failed', 'Canceled' - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: datetime - :param resource_id: ARM resource id of the underlying compute - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned - by user and brought from outside if true, or machine learning service - provisioned it if false. - :vartype is_attached_compute: bool - :param compute_type: Required. Constant filled by server. - :type compute_type: str + :param contents_type: Required. Constant filled by server. 
+ :type contents_type: str """ _validation = { - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - 'compute_type': {'required': True}, + 'contents_type': {'required': True}, } _attribute_map = { - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'contents_type': {'key': 'contentsType', 'type': 'str'}, } - def __init__(self, *, compute_location: str=None, description: str=None, resource_id: str=None, **kwargs) -> None: - super(DataFactory, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) - self.compute_type = 'DataFactory' + _subtype_map = { + 'contents_type': {'AzureBlob': 'AzureBlobContents', 'AzureDataLakeGen1': 'AzureDataLakeGen1Contents', 'AzureDataLakeGen2': 'AzureDataLakeGen2Contents', 'AzureFile': 'AzureFileContents', 'AzurePostgreSql': 'AzurePostgreSqlContents', 'AzureSqlDatabase': 'AzureSqlDatabaseContents', 'GlusterFs': 'GlusterFsContents'} + } + def __init__(self, **kwargs) -> None: + super(DatastoreContents, self).__init__(**kwargs) + self.contents_type = None -class DataLakeAnalytics(Compute): - """A DataLakeAnalytics compute. - Variables are only populated by the server, and will be ignored when - sending a request. +class AzureBlobContents(DatastoreContents): + """Azure Blob datastore configuration. 
All required parameters must be populated in order to send to Azure. - :param compute_location: Location for the underlying compute - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values - are Unknown, Updating, Provisioning, Succeeded, and Failed. Possible - values include: 'Unknown', 'Updating', 'Creating', 'Deleting', - 'Succeeded', 'Failed', 'Canceled' - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: datetime - :param resource_id: ARM resource id of the underlying compute - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned - by user and brought from outside if true, or machine learning service - provisioned it if false. - :vartype is_attached_compute: bool - :param compute_type: Required. Constant filled by server. - :type compute_type: str - :param properties: - :type properties: - ~azure.mgmt.machinelearningservices.models.DataLakeAnalyticsProperties + :param contents_type: Required. Constant filled by server. + :type contents_type: str + :param account_name: Required. Storage account name. + :type account_name: str + :param container_name: Required. Storage account container name. + :type container_name: str + :param credentials: Required. Account credentials. + :type credentials: + ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :param endpoint: Required. Azure cloud endpoint for the storage account. 
+ :type endpoint: str + :param protocol: Required. Protocol used to communicate with the storage + account. + :type protocol: str """ _validation = { - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - 'compute_type': {'required': True}, + 'contents_type': {'required': True}, + 'account_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'container_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'credentials': {'required': True}, + 'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'protocol': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, } _attribute_map = { - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'DataLakeAnalyticsProperties'}, + 'contents_type': {'key': 'contentsType', 'type': 'str'}, + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'container_name': {'key': 'containerName', 'type': 'str'}, + 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, + 'endpoint': {'key': 'endpoint', 'type': 'str'}, + 'protocol': {'key': 'protocol', 'type': 'str'}, } - def __init__(self, *, compute_location: str=None, description: str=None, resource_id: str=None, properties=None, **kwargs) -> None: - super(DataLakeAnalytics, self).__init__(compute_location=compute_location, 
description=description, resource_id=resource_id, **kwargs) - self.properties = properties - self.compute_type = 'DataLakeAnalytics' + def __init__(self, *, account_name: str, container_name: str, credentials, endpoint: str, protocol: str, **kwargs) -> None: + super(AzureBlobContents, self).__init__(**kwargs) + self.account_name = account_name + self.container_name = container_name + self.credentials = credentials + self.endpoint = endpoint + self.protocol = protocol + self.contents_type = 'AzureBlob' -class DataLakeAnalyticsProperties(Model): - """DataLakeAnalyticsProperties. +class AzureDataLakeGen1Contents(DatastoreContents): + """Azure Data Lake Gen1 datastore configuration. - :param data_lake_store_account_name: DataLake Store Account Name - :type data_lake_store_account_name: str + All required parameters must be populated in order to send to Azure. + + :param contents_type: Required. Constant filled by server. + :type contents_type: str + :param credentials: Required. Account credentials. + :type credentials: + ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :param store_name: Required. Azure Data Lake store name. 
+ :type store_name: str """ + _validation = { + 'contents_type': {'required': True}, + 'credentials': {'required': True}, + 'store_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + _attribute_map = { - 'data_lake_store_account_name': {'key': 'dataLakeStoreAccountName', 'type': 'str'}, + 'contents_type': {'key': 'contentsType', 'type': 'str'}, + 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, + 'store_name': {'key': 'storeName', 'type': 'str'}, } - def __init__(self, *, data_lake_store_account_name: str=None, **kwargs) -> None: - super(DataLakeAnalyticsProperties, self).__init__(**kwargs) - self.data_lake_store_account_name = data_lake_store_account_name + def __init__(self, *, credentials, store_name: str, **kwargs) -> None: + super(AzureDataLakeGen1Contents, self).__init__(**kwargs) + self.credentials = credentials + self.store_name = store_name + self.contents_type = 'AzureDataLakeGen1' -class ErrorDetail(Model): - """Error detail information. +class AzureDataLakeGen2Contents(DatastoreContents): + """Azure Data Lake Gen2 datastore configuration. All required parameters must be populated in order to send to Azure. - :param code: Required. Error code. - :type code: str - :param message: Required. Error message. - :type message: str + :param contents_type: Required. Constant filled by server. + :type contents_type: str + :param account_name: Required. Storage account name. + :type account_name: str + :param container_name: Required. Storage account container name. + :type container_name: str + :param credentials: Required. Account credentials. + :type credentials: + ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :param endpoint: Required. Azure cloud endpoint for the storage account. + :type endpoint: str + :param protocol: Required. Protocol used to communicate with the storage + account. 
+ :type protocol: str """ _validation = { - 'code': {'required': True}, - 'message': {'required': True}, + 'contents_type': {'required': True}, + 'account_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'container_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'credentials': {'required': True}, + 'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'protocol': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, } _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, + 'contents_type': {'key': 'contentsType', 'type': 'str'}, + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'container_name': {'key': 'containerName', 'type': 'str'}, + 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, + 'endpoint': {'key': 'endpoint', 'type': 'str'}, + 'protocol': {'key': 'protocol', 'type': 'str'}, } - def __init__(self, *, code: str, message: str, **kwargs) -> None: - super(ErrorDetail, self).__init__(**kwargs) - self.code = code - self.message = message + def __init__(self, *, account_name: str, container_name: str, credentials, endpoint: str, protocol: str, **kwargs) -> None: + super(AzureDataLakeGen2Contents, self).__init__(**kwargs) + self.account_name = account_name + self.container_name = container_name + self.credentials = credentials + self.endpoint = endpoint + self.protocol = protocol + self.contents_type = 'AzureDataLakeGen2' -class ErrorResponse(Model): - """Error response information. +class Resource(Model): + """Resource. + + Common fields that are returned in the response for all Azure Resource + Manager resources. Variables are only populated by the server, and will be ignored when sending a request. - :ivar code: Error code. - :vartype code: str - :ivar message: Error message. - :vartype message: str - :ivar details: An array of error detail objects. 
- :vartype details: - list[~azure.mgmt.machinelearningservices.models.ErrorDetail] + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str """ _validation = { - 'code': {'readonly': True}, - 'message': {'readonly': True}, - 'details': {'readonly': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, } _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - 'details': {'key': 'details', 'type': '[ErrorDetail]'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, } def __init__(self, **kwargs) -> None: - super(ErrorResponse, self).__init__(**kwargs) - self.code = None - self.message = None - self.details = None + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None -class HDInsight(Compute): - """A HDInsight compute. +class AzureEntityResource(Resource): + """Entity Resource. + + The resource model definition for an Azure Resource Manager resource with + an etag. Variables are only populated by the server, and will be ignored when sending a request. - All required parameters must be populated in order to send to Azure. - - :param compute_location: Location for the underlying compute - :type compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values - are Unknown, Updating, Provisioning, Succeeded, and Failed. 
Possible - values include: 'Unknown', 'Updating', 'Creating', 'Deleting', - 'Succeeded', 'Failed', 'Canceled' - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :param description: The description of the Machine Learning compute. - :type description: str - :ivar created_on: The date and time when the compute was created. - :vartype created_on: datetime - :ivar modified_on: The date and time when the compute was last modified. - :vartype modified_on: datetime - :param resource_id: ARM resource id of the underlying compute - :type resource_id: str - :ivar provisioning_errors: Errors during provisioning - :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] - :ivar is_attached_compute: Indicating whether the compute was provisioned - by user and brought from outside if true, or machine learning service - provisioned it if false. - :vartype is_attached_compute: bool - :param compute_type: Required. Constant filled by server. - :type compute_type: str - :param properties: - :type properties: - ~azure.mgmt.machinelearningservices.models.HDInsightProperties + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :ivar etag: Resource Etag. 
+ :vartype etag: str """ _validation = { - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - 'compute_type': {'required': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, } _attribute_map = { - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'HDInsightProperties'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, } - def __init__(self, *, compute_location: str=None, description: str=None, resource_id: str=None, properties=None, **kwargs) -> None: - super(HDInsight, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) - self.properties = properties - self.compute_type = 'HDInsight' + def __init__(self, **kwargs) -> None: + super(AzureEntityResource, self).__init__(**kwargs) + self.etag = None -class HDInsightProperties(Model): - """HDInsightProperties. +class AzureFileContents(DatastoreContents): + """Azure File datastore configuration. - :param ssh_port: Port open for ssh connections on the master node of the - cluster. 
- :type ssh_port: int - :param address: Public IP address of the master node of the cluster. - :type address: str - :param administrator_account: Admin credentials for master node of the - cluster - :type administrator_account: - ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials + All required parameters must be populated in order to send to Azure. + + :param contents_type: Required. Constant filled by server. + :type contents_type: str + :param account_name: Required. Storage account name. + :type account_name: str + :param container_name: Required. Storage account container name. + :type container_name: str + :param credentials: Required. Account credentials. + :type credentials: + ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :param endpoint: Required. Azure cloud endpoint for the storage account. + :type endpoint: str + :param protocol: Required. Protocol used to communicate with the storage + account. + :type protocol: str """ + _validation = { + 'contents_type': {'required': True}, + 'account_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'container_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'credentials': {'required': True}, + 'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'protocol': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + _attribute_map = { - 'ssh_port': {'key': 'sshPort', 'type': 'int'}, - 'address': {'key': 'address', 'type': 'str'}, - 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, + 'contents_type': {'key': 'contentsType', 'type': 'str'}, + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'container_name': {'key': 'containerName', 'type': 'str'}, + 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, + 'endpoint': {'key': 'endpoint', 'type': 'str'}, + 'protocol': {'key': 'protocol', 'type': 'str'}, } - def __init__(self, *, ssh_port: int=None, address: str=None, 
administrator_account=None, **kwargs) -> None: - super(HDInsightProperties, self).__init__(**kwargs) - self.ssh_port = ssh_port - self.address = address - self.administrator_account = administrator_account + def __init__(self, *, account_name: str, container_name: str, credentials, endpoint: str, protocol: str, **kwargs) -> None: + super(AzureFileContents, self).__init__(**kwargs) + self.account_name = account_name + self.container_name = container_name + self.credentials = credentials + self.endpoint = endpoint + self.protocol = protocol + self.contents_type = 'AzureFile' -class Identity(Model): - """Identity for the resource. +class AzurePostgreSqlContents(DatastoreContents): + """Azure Postgre SQL datastore configuration. - Variables are only populated by the server, and will be ignored when - sending a request. + All required parameters must be populated in order to send to Azure. - :ivar principal_id: The principal ID of resource identity. - :vartype principal_id: str - :ivar tenant_id: The tenant ID of resource. - :vartype tenant_id: str - :param type: The identity type. Possible values include: 'SystemAssigned' - :type type: str or - ~azure.mgmt.machinelearningservices.models.ResourceIdentityType + :param contents_type: Required. Constant filled by server. + :type contents_type: str + :param credentials: Required. Account credentials. + :type credentials: + ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :param database_name: Required. Azure SQL database name. + :type database_name: str + :param enable_ssl: Whether the Azure PostgreSQL server requires SSL. + :type enable_ssl: bool + :param endpoint: Required. Azure cloud endpoint for the database. + :type endpoint: str + :param port_number: Required. Azure SQL server port. + :type port_number: int + :param server_name: Required. Azure SQL server name. 
+ :type server_name: str """ _validation = { - 'principal_id': {'readonly': True}, - 'tenant_id': {'readonly': True}, + 'contents_type': {'required': True}, + 'credentials': {'required': True}, + 'database_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'port_number': {'required': True}, + 'server_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, } _attribute_map = { - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'ResourceIdentityType'}, + 'contents_type': {'key': 'contentsType', 'type': 'str'}, + 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, + 'database_name': {'key': 'databaseName', 'type': 'str'}, + 'enable_ssl': {'key': 'enableSSL', 'type': 'bool'}, + 'endpoint': {'key': 'endpoint', 'type': 'str'}, + 'port_number': {'key': 'portNumber', 'type': 'int'}, + 'server_name': {'key': 'serverName', 'type': 'str'}, } - def __init__(self, *, type=None, **kwargs) -> None: - super(Identity, self).__init__(**kwargs) - self.principal_id = None - self.tenant_id = None - self.type = type + def __init__(self, *, credentials, database_name: str, endpoint: str, port_number: int, server_name: str, enable_ssl: bool=None, **kwargs) -> None: + super(AzurePostgreSqlContents, self).__init__(**kwargs) + self.credentials = credentials + self.database_name = database_name + self.enable_ssl = enable_ssl + self.endpoint = endpoint + self.port_number = port_number + self.server_name = server_name + self.contents_type = 'AzurePostgreSql' -class ListWorkspaceKeysResult(Model): - """ListWorkspaceKeysResult. +class AzureSqlDatabaseContents(DatastoreContents): + """Azure SQL Database datastore configuration. - Variables are only populated by the server, and will be ignored when - sending a request. + All required parameters must be populated in order to send to Azure. 
- :ivar user_storage_key: - :vartype user_storage_key: str - :ivar user_storage_resource_id: - :vartype user_storage_resource_id: str - :ivar app_insights_instrumentation_key: - :vartype app_insights_instrumentation_key: str - :ivar container_registry_credentials: - :vartype container_registry_credentials: - ~azure.mgmt.machinelearningservices.models.RegistryListCredentialsResult + :param contents_type: Required. Constant filled by server. + :type contents_type: str + :param credentials: Required. Account credentials. + :type credentials: + ~azure.mgmt.machinelearningservices.models.DatastoreCredentials + :param database_name: Required. Azure SQL database name. + :type database_name: str + :param endpoint: Required. Azure cloud endpoint for the database. + :type endpoint: str + :param port_number: Required. Azure SQL server port. + :type port_number: int + :param server_name: Required. Azure SQL server name. + :type server_name: str """ _validation = { - 'user_storage_key': {'readonly': True}, - 'user_storage_resource_id': {'readonly': True}, - 'app_insights_instrumentation_key': {'readonly': True}, - 'container_registry_credentials': {'readonly': True}, + 'contents_type': {'required': True}, + 'credentials': {'required': True}, + 'database_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'endpoint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'port_number': {'required': True}, + 'server_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, } _attribute_map = { - 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'}, - 'user_storage_resource_id': {'key': 'userStorageResourceId', 'type': 'str'}, - 'app_insights_instrumentation_key': {'key': 'appInsightsInstrumentationKey', 'type': 'str'}, - 'container_registry_credentials': {'key': 'containerRegistryCredentials', 'type': 'RegistryListCredentialsResult'}, + 'contents_type': {'key': 'contentsType', 'type': 'str'}, + 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, + 
'database_name': {'key': 'databaseName', 'type': 'str'}, + 'endpoint': {'key': 'endpoint', 'type': 'str'}, + 'port_number': {'key': 'portNumber', 'type': 'int'}, + 'server_name': {'key': 'serverName', 'type': 'str'}, } - def __init__(self, **kwargs) -> None: - super(ListWorkspaceKeysResult, self).__init__(**kwargs) - self.user_storage_key = None - self.user_storage_resource_id = None - self.app_insights_instrumentation_key = None - self.container_registry_credentials = None + def __init__(self, *, credentials, database_name: str, endpoint: str, port_number: int, server_name: str, **kwargs) -> None: + super(AzureSqlDatabaseContents, self).__init__(**kwargs) + self.credentials = credentials + self.database_name = database_name + self.endpoint = endpoint + self.port_number = port_number + self.server_name = server_name + self.contents_type = 'AzureSqlDatabase' -class MachineLearningServiceError(Model): - """Wrapper for error response to follow ARM guidelines. +class EarlyTerminationPolicy(Model): + """Early termination policies enable canceling poor-performing runs before + they complete. - Variables are only populated by the server, and will be ignored when - sending a request. + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: BanditPolicy, MedianStoppingPolicy, + TruncationSelectionPolicy + + All required parameters must be populated in order to send to Azure. - :ivar error: The error response. - :vartype error: ~azure.mgmt.machinelearningservices.models.ErrorResponse + :param delay_evaluation: Number of intervals by which to delay the first + evaluation. + :type delay_evaluation: int + :param evaluation_interval: Interval (number of runs) between policy + evaluations. + :type evaluation_interval: int + :param policy_type: Required. Constant filled by server. 
+ :type policy_type: str """ _validation = { - 'error': {'readonly': True}, + 'policy_type': {'required': True}, } _attribute_map = { - 'error': {'key': 'error', 'type': 'ErrorResponse'}, + 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'}, + 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'}, + 'policy_type': {'key': 'policyType', 'type': 'str'}, } - def __init__(self, **kwargs) -> None: - super(MachineLearningServiceError, self).__init__(**kwargs) - self.error = None + _subtype_map = { + 'policy_type': {'Bandit': 'BanditPolicy', 'MedianStopping': 'MedianStoppingPolicy', 'TruncationSelection': 'TruncationSelectionPolicy'} + } + def __init__(self, *, delay_evaluation: int=None, evaluation_interval: int=None, **kwargs) -> None: + super(EarlyTerminationPolicy, self).__init__(**kwargs) + self.delay_evaluation = delay_evaluation + self.evaluation_interval = evaluation_interval + self.policy_type = None -class MachineLearningServiceErrorException(HttpOperationError): - """Server responsed with exception of type: 'MachineLearningServiceError'. - :param deserialize: A deserializer - :param response: Server response to be deserialized. +class BanditPolicy(EarlyTerminationPolicy): + """Defines an early termination policy based on slack criteria, and a + frequency and delay interval for evaluation. + + All required parameters must be populated in order to send to Azure. + + :param delay_evaluation: Number of intervals by which to delay the first + evaluation. + :type delay_evaluation: int + :param evaluation_interval: Interval (number of runs) between policy + evaluations. + :type evaluation_interval: int + :param policy_type: Required. Constant filled by server. + :type policy_type: str + :param slack_amount: Absolute distance allowed from the best performing + run. + :type slack_amount: float + :param slack_factor: Ratio of the allowed distance from the best + performing run. 
+ :type slack_factor: float """ - def __init__(self, deserialize, response, *args): + _validation = { + 'policy_type': {'required': True}, + } + + _attribute_map = { + 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'}, + 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'}, + 'policy_type': {'key': 'policyType', 'type': 'str'}, + 'slack_amount': {'key': 'slackAmount', 'type': 'float'}, + 'slack_factor': {'key': 'slackFactor', 'type': 'float'}, + } - super(MachineLearningServiceErrorException, self).__init__(deserialize, response, 'MachineLearningServiceError', *args) + def __init__(self, *, delay_evaluation: int=None, evaluation_interval: int=None, slack_amount: float=None, slack_factor: float=None, **kwargs) -> None: + super(BanditPolicy, self).__init__(delay_evaluation=delay_evaluation, evaluation_interval=evaluation_interval, **kwargs) + self.slack_amount = slack_amount + self.slack_factor = slack_factor + self.policy_type = 'Bandit' -class NodeStateCounts(Model): - """Counts of various compute node states on the amlCompute. +class BatchDeployment(Model): + """Batch inference settings per deployment. + + :param code_configuration: Code configuration for the endpoint deployment. + :type code_configuration: + ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :param compute: Configuration for compute binding. + :type compute: + ~azure.mgmt.machinelearningservices.models.ComputeConfiguration + :param description: Description of the endpoint deployment. + :type description: str + :param environment_id: ARM resource ID of the environment specification + for the endpoint deployment. + :type environment_id: str + :param environment_variables: Environment variables configuration for the + deployment. + :type environment_variables: dict[str, str] + :param error_threshold: Error threshold, if the error count for the entire + input goes above this value, + the batch inference will be aborted. Range is [-1, int.MaxValue]. 
+ For FileDataset, this value is the count of file failures. + For TabularDataset, this value is the count of record failures. + If set to -1 (the lower bound), all failures during batch inference will + be ignored. + :type error_threshold: int + :param logging_level: Logging level for batch inference operation. + Possible values include: 'Info', 'Warning', 'Debug' + :type logging_level: str or + ~azure.mgmt.machinelearningservices.models.BatchLoggingLevel + :param mini_batch_size: Size of the mini-batch passed to each batch + invocation. + For FileDataset, this is the number of files per mini-batch. + For TabularDataset, this is the size of the records in bytes, per + mini-batch. + :type mini_batch_size: long + :param model: Reference to the model asset for the endpoint deployment. + :type model: ~azure.mgmt.machinelearningservices.models.AssetReferenceBase + :param output_configuration: Output configuration for the batch inference + operation. + :type output_configuration: + ~azure.mgmt.machinelearningservices.models.BatchOutputConfiguration + :param partition_keys: Partition keys list used for Named partitioning. + :type partition_keys: list[str] + :param properties: Property dictionary. Properties can be added, but not + removed or altered. + :type properties: dict[str, str] + :param retry_settings: Retry Settings for the batch inference operation. 
+ :type retry_settings: + ~azure.mgmt.machinelearningservices.models.BatchRetrySettings + """ + + _attribute_map = { + 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, + 'compute': {'key': 'compute', 'type': 'ComputeConfiguration'}, + 'description': {'key': 'description', 'type': 'str'}, + 'environment_id': {'key': 'environmentId', 'type': 'str'}, + 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, + 'error_threshold': {'key': 'errorThreshold', 'type': 'int'}, + 'logging_level': {'key': 'loggingLevel', 'type': 'str'}, + 'mini_batch_size': {'key': 'miniBatchSize', 'type': 'long'}, + 'model': {'key': 'model', 'type': 'AssetReferenceBase'}, + 'output_configuration': {'key': 'outputConfiguration', 'type': 'BatchOutputConfiguration'}, + 'partition_keys': {'key': 'partitionKeys', 'type': '[str]'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'retry_settings': {'key': 'retrySettings', 'type': 'BatchRetrySettings'}, + } + + def __init__(self, *, code_configuration=None, compute=None, description: str=None, environment_id: str=None, environment_variables=None, error_threshold: int=None, logging_level=None, mini_batch_size: int=None, model=None, output_configuration=None, partition_keys=None, properties=None, retry_settings=None, **kwargs) -> None: + super(BatchDeployment, self).__init__(**kwargs) + self.code_configuration = code_configuration + self.compute = compute + self.description = description + self.environment_id = environment_id + self.environment_variables = environment_variables + self.error_threshold = error_threshold + self.logging_level = logging_level + self.mini_batch_size = mini_batch_size + self.model = model + self.output_configuration = output_configuration + self.partition_keys = partition_keys + self.properties = properties + self.retry_settings = retry_settings + + +class TrackedResource(Resource): + """Tracked Resource. 
+ + The resource model definition for an Azure Resource Manager tracked top + level resource which has 'tags' and a 'location'. Variables are only populated by the server, and will be ignored when sending a request. - :ivar idle_node_count: Idle node count. Number of compute nodes in idle - state. - :vartype idle_node_count: int - :ivar running_node_count: Running node count. Number of compute nodes - which are running jobs. - :vartype running_node_count: int - :ivar preparing_node_count: Preparing node count. Number of compute nodes - which are being prepared. - :vartype preparing_node_count: int - :ivar unusable_node_count: Unusable node count. Number of compute nodes - which are in unusable state. - :vartype unusable_node_count: int - :ivar leaving_node_count: Leaving node count. Number of compute nodes - which are leaving the amlCompute. - :vartype leaving_node_count: int - :ivar preempted_node_count: Preempted node count. Number of compute nodes - which are in preempted state. - :vartype preempted_node_count: int + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param tags: Resource tags. + :type tags: dict[str, str] + :param location: Required. 
The geo-location where the resource lives + :type location: str """ _validation = { - 'idle_node_count': {'readonly': True}, - 'running_node_count': {'readonly': True}, - 'preparing_node_count': {'readonly': True}, - 'unusable_node_count': {'readonly': True}, - 'leaving_node_count': {'readonly': True}, - 'preempted_node_count': {'readonly': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, } _attribute_map = { - 'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'}, - 'running_node_count': {'key': 'runningNodeCount', 'type': 'int'}, - 'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'}, - 'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'}, - 'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'}, - 'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, } - def __init__(self, **kwargs) -> None: - super(NodeStateCounts, self).__init__(**kwargs) - self.idle_node_count = None - self.running_node_count = None - self.preparing_node_count = None - self.unusable_node_count = None - self.leaving_node_count = None - self.preempted_node_count = None + def __init__(self, *, location: str, tags=None, **kwargs) -> None: + super(TrackedResource, self).__init__(**kwargs) + self.tags = tags + self.location = location -class Operation(Model): - """Azure Machine Learning workspace REST API operation. +class BatchDeploymentTrackedResource(TrackedResource): + """BatchDeploymentTrackedResource. 
- :param name: Operation name: {provider}/{resource}/{operation} - :type name: str - :param display: Display name of operation - :type display: ~azure.mgmt.machinelearningservices.models.OperationDisplay + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param tags: Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives + :type location: str + :param identity: Service identity associated with a resource. + :type identity: + ~azure.mgmt.machinelearningservices.models.ResourceIdentity + :param kind: Metadata used by portal/tooling/etc to render different UX + experiences for resources of the same type. + :type kind: str + :param properties: Required. Additional attributes of the entity. 
+ :type properties: + ~azure.mgmt.machinelearningservices.models.BatchDeployment + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData """ + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, + } + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, - 'display': {'key': 'display', 'type': 'OperationDisplay'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'BatchDeployment'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, } - def __init__(self, *, name: str=None, display=None, **kwargs) -> None: - super(Operation, self).__init__(**kwargs) - self.name = name - self.display = display + def __init__(self, *, location: str, properties, tags=None, identity=None, kind: str=None, **kwargs) -> None: + super(BatchDeploymentTrackedResource, self).__init__(tags=tags, location=location, **kwargs) + self.identity = identity + self.kind = kind + self.properties = properties + self.system_data = None -class OperationDisplay(Model): - """Display name of operation. +class BatchEndpoint(Model): + """Batch endpoint configuration. - :param provider: The resource provider name: - Microsoft.MachineLearningExperimentation - :type provider: str - :param resource: The resource on which the operation is performed. - :type resource: str - :param operation: The operation that users can perform. - :type operation: str - :param description: The description for the operation. 
+ Variables are only populated by the server, and will be ignored when + sending a request. + + :param auth_mode: Enum to determine endpoint authentication mode. Possible + values include: 'AMLToken', 'Key', 'AADToken' + :type auth_mode: str or + ~azure.mgmt.machinelearningservices.models.EndpointAuthMode + :param description: Description of the inference endpoint. :type description: str + :param keys: EndpointAuthKeys to set initially on an Endpoint. + This property will always be returned as null. AuthKey values must be + retrieved using the ListKeys API. + :type keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :param properties: Property dictionary. Properties can be added, but not + removed or altered. + :type properties: dict[str, str] + :ivar scoring_uri: Endpoint URI. + :vartype scoring_uri: str + :ivar swagger_uri: Endpoint Swagger URI. + :vartype swagger_uri: str + :param traffic: Traffic rules on how the traffic will be routed across + deployments. + :type traffic: dict[str, int] """ + _validation = { + 'scoring_uri': {'readonly': True}, + 'swagger_uri': {'readonly': True}, + } + _attribute_map = { - 'provider': {'key': 'provider', 'type': 'str'}, - 'resource': {'key': 'resource', 'type': 'str'}, - 'operation': {'key': 'operation', 'type': 'str'}, + 'auth_mode': {'key': 'authMode', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, + 'keys': {'key': 'keys', 'type': 'EndpointAuthKeys'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'scoring_uri': {'key': 'scoringUri', 'type': 'str'}, + 'swagger_uri': {'key': 'swaggerUri', 'type': 'str'}, + 'traffic': {'key': 'traffic', 'type': '{int}'}, } - def __init__(self, *, provider: str=None, resource: str=None, operation: str=None, description: str=None, **kwargs) -> None: - super(OperationDisplay, self).__init__(**kwargs) - self.provider = provider - self.resource = resource - self.operation = operation + def __init__(self, *, auth_mode=None, description: str=None, 
keys=None, properties=None, traffic=None, **kwargs) -> None: + super(BatchEndpoint, self).__init__(**kwargs) + self.auth_mode = auth_mode self.description = description + self.keys = keys + self.properties = properties + self.scoring_uri = None + self.swagger_uri = None + self.traffic = traffic -class Password(Model): - """Password. +class BatchEndpointTrackedResource(TrackedResource): + """BatchEndpointTrackedResource. Variables are only populated by the server, and will be ignored when sending a request. - :ivar name: + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource :vartype name: str - :ivar value: - :vartype value: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param tags: Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives + :type location: str + :param identity: Service identity associated with a resource. + :type identity: + ~azure.mgmt.machinelearningservices.models.ResourceIdentity + :param kind: Metadata used by portal/tooling/etc to render different UX + experiences for resources of the same type. + :type kind: str + :param properties: Required. Additional attributes of the entity. 
+ :type properties: ~azure.mgmt.machinelearningservices.models.BatchEndpoint + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData """ _validation = { + 'id': {'readonly': True}, 'name': {'readonly': True}, - 'value': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, } _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'BatchEndpoint'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, *, location: str, properties, tags=None, identity=None, kind: str=None, **kwargs) -> None: + super(BatchEndpointTrackedResource, self).__init__(tags=tags, location=location, **kwargs) + self.identity = identity + self.kind = kind + self.properties = properties + self.system_data = None + + +class BatchOutputConfiguration(Model): + """Batch inference output configuration. + + :param append_row_file_name: Customized output file name for append_row + output action. + :type append_row_file_name: str + :param output_action: Indicates how the output will be organized. 
Possible + values include: 'SummaryOnly', 'AppendRow' + :type output_action: str or + ~azure.mgmt.machinelearningservices.models.BatchOutputAction + """ + + _attribute_map = { + 'append_row_file_name': {'key': 'appendRowFileName', 'type': 'str'}, + 'output_action': {'key': 'outputAction', 'type': 'str'}, + } + + def __init__(self, *, append_row_file_name: str=None, output_action=None, **kwargs) -> None: + super(BatchOutputConfiguration, self).__init__(**kwargs) + self.append_row_file_name = append_row_file_name + self.output_action = output_action + + +class BatchRetrySettings(Model): + """Retry settings for a batch inference operation. + + :param max_retries: Maximum retry count for a mini-batch + :type max_retries: int + :param timeout: Invocation timeout for a mini-batch, in ISO 8601 format. + :type timeout: timedelta + """ + + _attribute_map = { + 'max_retries': {'key': 'maxRetries', 'type': 'int'}, + 'timeout': {'key': 'timeout', 'type': 'duration'}, + } + + def __init__(self, *, max_retries: int=None, timeout=None, **kwargs) -> None: + super(BatchRetrySettings, self).__init__(**kwargs) + self.max_retries = max_retries + self.timeout = timeout + + +class CertificateDatastoreCredentials(DatastoreCredentials): + """Certificate datastore credentials configuration. + + All required parameters must be populated in order to send to Azure. + + :param credentials_type: Required. Constant filled by server. + :type credentials_type: str + :param authority_url: Authority URL used for authentication. + :type authority_url: str + :param client_id: Required. Service principal client ID. + :type client_id: str + :param resource_uri: Resource the service principal has access to. + :type resource_uri: str + :param secrets: Service principal secrets. + :type secrets: + ~azure.mgmt.machinelearningservices.models.CertificateDatastoreSecrets + :param tenant_id: Required. ID of the tenant to which the service + principal belongs. 
+ :type tenant_id: str + :param thumbprint: Required. Thumbprint of the certificate used for + authentication. + :type thumbprint: str + """ + + _validation = { + 'credentials_type': {'required': True}, + 'client_id': {'required': True}, + 'tenant_id': {'required': True}, + 'thumbprint': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, + 'authority_url': {'key': 'authorityUrl', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + 'resource_uri': {'key': 'resourceUri', 'type': 'str'}, + 'secrets': {'key': 'secrets', 'type': 'CertificateDatastoreSecrets'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'thumbprint': {'key': 'thumbprint', 'type': 'str'}, + } + + def __init__(self, *, client_id: str, tenant_id: str, thumbprint: str, authority_url: str=None, resource_uri: str=None, secrets=None, **kwargs) -> None: + super(CertificateDatastoreCredentials, self).__init__(**kwargs) + self.authority_url = authority_url + self.client_id = client_id + self.resource_uri = resource_uri + self.secrets = secrets + self.tenant_id = tenant_id + self.thumbprint = thumbprint + self.credentials_type = 'Certificate' + + +class CertificateDatastoreSecrets(DatastoreSecrets): + """Datastore certificate secrets. + + All required parameters must be populated in order to send to Azure. + + :param secrets_type: Required. Constant filled by server. + :type secrets_type: str + :param certificate: Service principal certificate. 
+ :type certificate: str + """ + + _validation = { + 'secrets_type': {'required': True}, + } + + _attribute_map = { + 'secrets_type': {'key': 'secretsType', 'type': 'str'}, + 'certificate': {'key': 'certificate', 'type': 'str'}, + } + + def __init__(self, *, certificate: str=None, **kwargs) -> None: + super(CertificateDatastoreSecrets, self).__init__(**kwargs) + self.certificate = certificate + self.secrets_type = 'Certificate' + + +class CloudError(Model): + """CloudError. + """ + + _attribute_map = { + } + + +class ClusterUpdateParameters(Model): + """AmlCompute update parameters. + + :param scale_settings: Scale settings. Desired scale settings for the + amlCompute. + :type scale_settings: + ~azure.mgmt.machinelearningservices.models.ScaleSettings + """ + + _attribute_map = { + 'scale_settings': {'key': 'properties.scaleSettings', 'type': 'ScaleSettings'}, + } + + def __init__(self, *, scale_settings=None, **kwargs) -> None: + super(ClusterUpdateParameters, self).__init__(**kwargs) + self.scale_settings = scale_settings + + +class ExportSummary(Model): + """ExportSummary. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: CocoExportSummary, CsvExportSummary, DatasetExportSummary + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar end_time_utc: The time when the export was completed. + :vartype end_time_utc: datetime + :ivar exported_row_count: The total number of labeled datapoints exported. + :vartype exported_row_count: long + :ivar labeling_job_id: Name and identifier of the job containing exported + labels. + :vartype labeling_job_id: str + :ivar start_time_utc: The time when the export was requested. + :vartype start_time_utc: datetime + :param format: Required. Constant filled by server. 
+ :type format: str + """ + + _validation = { + 'end_time_utc': {'readonly': True}, + 'exported_row_count': {'readonly': True}, + 'labeling_job_id': {'readonly': True}, + 'start_time_utc': {'readonly': True}, + 'format': {'required': True}, + } + + _attribute_map = { + 'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'}, + 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'}, + 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'}, + 'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'}, + 'format': {'key': 'format', 'type': 'str'}, + } + + _subtype_map = { + 'format': {'Coco': 'CocoExportSummary', 'CSV': 'CsvExportSummary', 'Dataset': 'DatasetExportSummary'} + } + + def __init__(self, **kwargs) -> None: + super(ExportSummary, self).__init__(**kwargs) + self.end_time_utc = None + self.exported_row_count = None + self.labeling_job_id = None + self.start_time_utc = None + self.format = None + + +class CocoExportSummary(ExportSummary): + """CocoExportSummary. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar end_time_utc: The time when the export was completed. + :vartype end_time_utc: datetime + :ivar exported_row_count: The total number of labeled datapoints exported. + :vartype exported_row_count: long + :ivar labeling_job_id: Name and identifier of the job containing exported + labels. + :vartype labeling_job_id: str + :ivar start_time_utc: The time when the export was requested. + :vartype start_time_utc: datetime + :param format: Required. Constant filled by server. + :type format: str + :ivar container_name: The container name to which the labels will be + exported. + :vartype container_name: str + :ivar snapshot_path: The output path where the labels will be exported. 
+ :vartype snapshot_path: str + """ + + _validation = { + 'end_time_utc': {'readonly': True}, + 'exported_row_count': {'readonly': True}, + 'labeling_job_id': {'readonly': True}, + 'start_time_utc': {'readonly': True}, + 'format': {'required': True}, + 'container_name': {'readonly': True}, + 'snapshot_path': {'readonly': True}, + } + + _attribute_map = { + 'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'}, + 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'}, + 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'}, + 'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'}, + 'format': {'key': 'format', 'type': 'str'}, + 'container_name': {'key': 'containerName', 'type': 'str'}, + 'snapshot_path': {'key': 'snapshotPath', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(CocoExportSummary, self).__init__(**kwargs) + self.container_name = None + self.snapshot_path = None + self.format = 'Coco' + + +class CodeConfiguration(Model): + """Configuration for a scoring code asset. + + All required parameters must be populated in order to send to Azure. + + :param code_id: ARM resource ID of the code asset. + :type code_id: str + :param scoring_script: Required. The script to execute on startup. eg. + "score.py" + :type scoring_script: str + """ + + _validation = { + 'scoring_script': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'code_id': {'key': 'codeId', 'type': 'str'}, + 'scoring_script': {'key': 'scoringScript', 'type': 'str'}, + } + + def __init__(self, *, scoring_script: str, code_id: str=None, **kwargs) -> None: + super(CodeConfiguration, self).__init__(**kwargs) + self.code_id = code_id + self.scoring_script = scoring_script + + +class CodeContainer(Model): + """Container for code asset versions. + + :param description: The asset description text. + :type description: str + :param properties: The asset property dictionary. 
+ :type properties: dict[str, str] + :param tags: Tag dictionary. Tags can be added, removed, and updated. + :type tags: dict[str, str] + """ + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, *, description: str=None, properties=None, tags=None, **kwargs) -> None: + super(CodeContainer, self).__init__(**kwargs) + self.description = description + self.properties = properties + self.tags = tags + + +class CodeContainerResource(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param properties: Required. Additional attributes of the entity. 
+ :type properties: ~azure.mgmt.machinelearningservices.models.CodeContainer + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'CodeContainer'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(CodeContainerResource, self).__init__(**kwargs) + self.properties = properties + self.system_data = None + + +class CodeVersion(Model): + """Code asset version details. + + All required parameters must be populated in order to send to Azure. + + :param datastore_id: ARM resource ID of the datastore where the asset is + located. + :type datastore_id: str + :param description: The asset description text. + :type description: str + :param is_anonymous: If the name version are system generated (anonymous + registration). + :type is_anonymous: bool + :param path: Required. The path of the file/directory in the datastore. + :type path: str + :param properties: The asset property dictionary. + :type properties: dict[str, str] + :param tags: Tag dictionary. Tags can be added, removed, and updated. 
+ :type tags: dict[str, str] + """ + + _validation = { + 'path': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'datastore_id': {'key': 'datastoreId', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, + 'path': {'key': 'path', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, *, path: str, datastore_id: str=None, description: str=None, is_anonymous: bool=None, properties=None, tags=None, **kwargs) -> None: + super(CodeVersion, self).__init__(**kwargs) + self.datastore_id = datastore_id + self.description = description + self.is_anonymous = is_anonymous + self.path = path + self.properties = properties + self.tags = tags + + +class CodeVersionResource(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param properties: Required. Additional attributes of the entity. 
+ :type properties: ~azure.mgmt.machinelearningservices.models.CodeVersion + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'CodeVersion'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(CodeVersionResource, self).__init__(**kwargs) + self.properties = properties + self.system_data = None + + +class JobBase(Model): + """Base definition for a job. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: CommandJob, SweepJob + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param description: The asset description text. + :type description: str + :ivar interaction_endpoints: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of + FileStreamObject. + :vartype interaction_endpoints: dict[str, + ~azure.mgmt.machinelearningservices.models.JobEndpoint] + :param properties: The asset property dictionary. + :type properties: dict[str, str] + :ivar provisioning_state: Specifies the job provisioning state. Possible + values include: 'Succeeded', 'Failed', 'Canceled', 'InProgress' + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.JobProvisioningState + :param tags: Tag dictionary. Tags can be added, removed, and updated. + :type tags: dict[str, str] + :param job_type: Required. 
Constant filled by server. + :type job_type: str + """ + + _validation = { + 'interaction_endpoints': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'job_type': {'required': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'job_type': {'key': 'jobType', 'type': 'str'}, + } + + _subtype_map = { + 'job_type': {'Command': 'CommandJob', 'Sweep': 'SweepJob'} + } + + def __init__(self, *, description: str=None, properties=None, tags=None, **kwargs) -> None: + super(JobBase, self).__init__(**kwargs) + self.description = description + self.interaction_endpoints = None + self.properties = properties + self.provisioning_state = None + self.tags = tags + self.job_type = None + + +class CommandJob(JobBase): + """Command job definition. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param description: The asset description text. + :type description: str + :ivar interaction_endpoints: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of + FileStreamObject. + :vartype interaction_endpoints: dict[str, + ~azure.mgmt.machinelearningservices.models.JobEndpoint] + :param properties: The asset property dictionary. + :type properties: dict[str, str] + :ivar provisioning_state: Specifies the job provisioning state. Possible + values include: 'Succeeded', 'Failed', 'Canceled', 'InProgress' + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.JobProvisioningState + :param tags: Tag dictionary. Tags can be added, removed, and updated. + :type tags: dict[str, str] + :param job_type: Required. 
Constant filled by server. + :type job_type: str + :param code_id: ARM resource ID of the code asset. + :type code_id: str + :param command: Required. The command to execute on startup of the job. + eg. "python train.py" + :type command: str + :param compute: Required. Compute binding for the job. + :type compute: + ~azure.mgmt.machinelearningservices.models.ComputeConfiguration + :param distribution: Distribution configuration of the job. If set, this + should be one of Mpi, Tensorflow, PyTorch, or null. + :type distribution: + ~azure.mgmt.machinelearningservices.models.DistributionConfiguration + :param environment_id: The ARM resource ID of the Environment + specification for the job. + :type environment_id: str + :param environment_variables: Environment variables included in the job. + :type environment_variables: dict[str, str] + :param experiment_name: The name of the experiment the job belongs to. If + not set, the job is placed in the "Default" experiment. + :type experiment_name: str + :param identity: Identity configuration. If set, this should be one of + AmlToken, ManagedIdentity, or null. + Defaults to AmlToken if null. + :type identity: + ~azure.mgmt.machinelearningservices.models.IdentityConfiguration + :param input_data_bindings: Mapping of input data bindings used in the + job. + :type input_data_bindings: dict[str, + ~azure.mgmt.machinelearningservices.models.InputDataBinding] + :ivar output: Location of the job output logs and artifacts. + :vartype output: ~azure.mgmt.machinelearningservices.models.JobOutput + :param output_data_bindings: Mapping of output data bindings used in the + job. + :type output_data_bindings: dict[str, + ~azure.mgmt.machinelearningservices.models.OutputDataBinding] + :ivar parameters: Input parameters. + :vartype parameters: dict[str, object] + :param priority: Job priority for scheduling policy. Only applies to + AMLCompute. + Private preview feature and only available to users on the allow list. 
+ :type priority: int + :ivar status: Status of the job. Possible values include: 'NotStarted', + 'Starting', 'Provisioning', 'Preparing', 'Queued', 'Running', + 'Finalizing', 'CancelRequested', 'Completed', 'Failed', 'Canceled', + 'NotResponding', 'Paused', 'Unknown' + :vartype status: str or + ~azure.mgmt.machinelearningservices.models.JobStatus + :param timeout: The max run duration in ISO 8601 format, after which the + job will be cancelled. Only supports duration with precision as low as + Seconds. + :type timeout: timedelta + """ + + _validation = { + 'interaction_endpoints': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'job_type': {'required': True}, + 'command': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + 'compute': {'required': True}, + 'output': {'readonly': True}, + 'parameters': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'job_type': {'key': 'jobType', 'type': 'str'}, + 'code_id': {'key': 'codeId', 'type': 'str'}, + 'command': {'key': 'command', 'type': 'str'}, + 'compute': {'key': 'compute', 'type': 'ComputeConfiguration'}, + 'distribution': {'key': 'distribution', 'type': 'DistributionConfiguration'}, + 'environment_id': {'key': 'environmentId', 'type': 'str'}, + 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, + 'experiment_name': {'key': 'experimentName', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, + 'input_data_bindings': {'key': 'inputDataBindings', 'type': '{InputDataBinding}'}, + 'output': {'key': 'output', 'type': 'JobOutput'}, + 'output_data_bindings': {'key': 'outputDataBindings', 'type': 
'{OutputDataBinding}'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + 'priority': {'key': 'priority', 'type': 'int'}, + 'status': {'key': 'status', 'type': 'str'}, + 'timeout': {'key': 'timeout', 'type': 'duration'}, + } + + def __init__(self, *, command: str, compute, description: str=None, properties=None, tags=None, code_id: str=None, distribution=None, environment_id: str=None, environment_variables=None, experiment_name: str=None, identity=None, input_data_bindings=None, output_data_bindings=None, priority: int=None, timeout=None, **kwargs) -> None: + super(CommandJob, self).__init__(description=description, properties=properties, tags=tags, **kwargs) + self.code_id = code_id + self.command = command + self.compute = compute + self.distribution = distribution + self.environment_id = environment_id + self.environment_variables = environment_variables + self.experiment_name = experiment_name + self.identity = identity + self.input_data_bindings = input_data_bindings + self.output = None + self.output_data_bindings = output_data_bindings + self.parameters = None + self.priority = priority + self.status = None + self.timeout = timeout + self.job_type = 'Command' + + +class ComputeConfiguration(Model): + """Configuration for compute binding. + + :param instance_count: Number of instances or nodes. + :type instance_count: int + :param instance_type: SKU type to run on. + :type instance_type: str + :param is_local: Set to true for jobs running on local compute. + :type is_local: bool + :param location: Location for virtual cluster run. + :type location: str + :param properties: Additional properties. + :type properties: dict[str, str] + :param target: ARM resource ID of the compute resource. 
+ :type target: str + """ + + _attribute_map = { + 'instance_count': {'key': 'instanceCount', 'type': 'int'}, + 'instance_type': {'key': 'instanceType', 'type': 'str'}, + 'is_local': {'key': 'isLocal', 'type': 'bool'}, + 'location': {'key': 'location', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'target': {'key': 'target', 'type': 'str'}, + } + + def __init__(self, *, instance_count: int=None, instance_type: str=None, is_local: bool=None, location: str=None, properties=None, target: str=None, **kwargs) -> None: + super(ComputeConfiguration, self).__init__(**kwargs) + self.instance_count = instance_count + self.instance_type = instance_type + self.is_local = is_local + self.location = location + self.properties = properties + self.target = target + + +class ComputeInstance(Compute): + """An Azure Machine Learning compute instance. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_location: Location for the underlying compute + :type compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values + are Unknown, Updating, Provisioning, Succeeded, and Failed. Possible + values include: 'Unknown', 'Updating', 'Creating', 'Deleting', + 'Succeeded', 'Failed', 'Canceled' + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ProvisioningState + :param description: The description of the Machine Learning compute. + :type description: str + :ivar created_on: The time at which the compute was created. + :vartype created_on: datetime + :ivar modified_on: The time at which the compute was last modified. 
+ :vartype modified_on: datetime + :param resource_id: ARM resource id of the underlying compute + :type resource_id: str + :ivar provisioning_errors: Errors during provisioning + :vartype provisioning_errors: + list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar is_attached_compute: Indicating whether the compute was provisioned + by user and brought from outside if true, or machine learning service + provisioned it if false. + :vartype is_attached_compute: bool + :param disable_local_auth: Opt-out of local authentication and ensure + customers can use only MSI and AAD exclusively for authentication. + :type disable_local_auth: bool + :param compute_type: Required. Constant filled by server. + :type compute_type: str + :param properties: Compute Instance properties + :type properties: + ~azure.mgmt.machinelearningservices.models.ComputeInstanceProperties + """ + + _validation = { + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + 'compute_type': {'required': True}, + } + + _attribute_map = { + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'ComputeInstanceProperties'}, + } + + def __init__(self, *, compute_location: str=None, description: str=None, resource_id: 
str=None, disable_local_auth: bool=None, properties=None, **kwargs) -> None: + super(ComputeInstance, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs) + self.properties = properties + self.compute_type = 'ComputeInstance' + + +class ComputeInstanceApplication(Model): + """Defines an Aml Instance application and its connectivity endpoint URI. + + :param display_name: Name of the ComputeInstance application. + :type display_name: str + :param endpoint_uri: Application's endpoint URI. + :type endpoint_uri: str + """ + + _attribute_map = { + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'}, + } + + def __init__(self, *, display_name: str=None, endpoint_uri: str=None, **kwargs) -> None: + super(ComputeInstanceApplication, self).__init__(**kwargs) + self.display_name = display_name + self.endpoint_uri = endpoint_uri + + +class ComputeInstanceConnectivityEndpoints(Model): + """Defines all connectivity endpoints and properties for a ComputeInstance. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar public_ip_address: Public IP Address of this ComputeInstance. + :vartype public_ip_address: str + :ivar private_ip_address: Private IP Address of this ComputeInstance + (local to the VNET in which the compute instance is deployed). 
+ :vartype private_ip_address: str + """ + + _validation = { + 'public_ip_address': {'readonly': True}, + 'private_ip_address': {'readonly': True}, + } + + _attribute_map = { + 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, + 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(ComputeInstanceConnectivityEndpoints, self).__init__(**kwargs) + self.public_ip_address = None + self.private_ip_address = None + + +class ComputeInstanceCreatedBy(Model): + """Describes information on user who created this ComputeInstance. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar user_name: Name of the user. + :vartype user_name: str + :ivar user_org_id: Uniquely identifies user's Azure Active Directory + organization. + :vartype user_org_id: str + :ivar user_id: Uniquely identifies the user within his/her organization. + :vartype user_id: str + """ + + _validation = { + 'user_name': {'readonly': True}, + 'user_org_id': {'readonly': True}, + 'user_id': {'readonly': True}, + } + + _attribute_map = { + 'user_name': {'key': 'userName', 'type': 'str'}, + 'user_org_id': {'key': 'userOrgId', 'type': 'str'}, + 'user_id': {'key': 'userId', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(ComputeInstanceCreatedBy, self).__init__(**kwargs) + self.user_name = None + self.user_org_id = None + self.user_id = None + + +class ComputeInstanceLastOperation(Model): + """The last operation on ComputeInstance. + + :param operation_name: Name of the last operation. Possible values + include: 'Create', 'Start', 'Stop', 'Restart', 'Reimage', 'Delete' + :type operation_name: str or + ~azure.mgmt.machinelearningservices.models.OperationName + :param operation_time: Time of the last operation. + :type operation_time: datetime + :param operation_status: Operation status. 
Possible values include: + 'InProgress', 'Succeeded', 'CreateFailed', 'StartFailed', 'StopFailed', + 'RestartFailed', 'ReimageFailed', 'DeleteFailed' + :type operation_status: str or + ~azure.mgmt.machinelearningservices.models.OperationStatus + """ + + _attribute_map = { + 'operation_name': {'key': 'operationName', 'type': 'str'}, + 'operation_time': {'key': 'operationTime', 'type': 'iso-8601'}, + 'operation_status': {'key': 'operationStatus', 'type': 'str'}, + } + + def __init__(self, *, operation_name=None, operation_time=None, operation_status=None, **kwargs) -> None: + super(ComputeInstanceLastOperation, self).__init__(**kwargs) + self.operation_name = operation_name + self.operation_time = operation_time + self.operation_status = operation_status + + +class ComputeInstanceProperties(Model): + """Compute Instance properties. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param vm_size: Virtual Machine Size + :type vm_size: str + :param subnet: Subnet. Virtual network subnet resource ID the compute + nodes belong to. + :type subnet: ~azure.mgmt.machinelearningservices.models.ResourceId + :param application_sharing_policy: Sharing policy for applications on this + compute instance. Policy for sharing applications on this compute instance + among users of parent workspace. If Personal, only the creator can access + applications on this compute instance. When Shared, any workspace user can + access applications on this instance depending on his/her assigned role. + Possible values include: 'Personal', 'Shared'. Default value: "Shared" . + :type application_sharing_policy: str or + ~azure.mgmt.machinelearningservices.models.ApplicationSharingPolicy + :param ssh_settings: Specifies policy and settings for SSH access. + :type ssh_settings: + ~azure.mgmt.machinelearningservices.models.ComputeInstanceSshSettings + :ivar connectivity_endpoints: Describes all connectivity endpoints + available for this ComputeInstance. 
+ :vartype connectivity_endpoints: + ~azure.mgmt.machinelearningservices.models.ComputeInstanceConnectivityEndpoints + :ivar applications: Describes available applications and their endpoints + on this ComputeInstance. + :vartype applications: + list[~azure.mgmt.machinelearningservices.models.ComputeInstanceApplication] + :ivar created_by: Describes information on user who created this + ComputeInstance. + :vartype created_by: + ~azure.mgmt.machinelearningservices.models.ComputeInstanceCreatedBy + :ivar errors: Errors. Collection of errors encountered on this + ComputeInstance. + :vartype errors: + list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar state: The current state of this ComputeInstance. Possible values + include: 'Creating', 'CreateFailed', 'Deleting', 'Running', 'Restarting', + 'JobRunning', 'SettingUp', 'SetupFailed', 'Starting', 'Stopped', + 'Stopping', 'UserSettingUp', 'UserSetupFailed', 'Unknown', 'Unusable' + :vartype state: str or + ~azure.mgmt.machinelearningservices.models.ComputeInstanceState + :param compute_instance_authorization_type: Compute Instance Authorization + type. The Compute Instance Authorization type. Available values are + personal (default). Possible values include: 'personal'. Default value: + "personal" . + :type compute_instance_authorization_type: str or + ~azure.mgmt.machinelearningservices.models.ComputeInstanceAuthorizationType + :param personal_compute_instance_settings: Personal Compute Instance + settings. Settings for a personal compute instance. + :type personal_compute_instance_settings: + ~azure.mgmt.machinelearningservices.models.PersonalComputeInstanceSettings + :param setup_scripts: Details of customized scripts to execute for setting + up the cluster. + :type setup_scripts: + ~azure.mgmt.machinelearningservices.models.SetupScripts + :ivar last_operation: The last operation on ComputeInstance. 
+ :vartype last_operation: + ~azure.mgmt.machinelearningservices.models.ComputeInstanceLastOperation + :param schedules: The list of schedules to be applied on the compute + instance. + :type schedules: + ~azure.mgmt.machinelearningservices.models.ComputeSchedules + """ + + _validation = { + 'connectivity_endpoints': {'readonly': True}, + 'applications': {'readonly': True}, + 'created_by': {'readonly': True}, + 'errors': {'readonly': True}, + 'state': {'readonly': True}, + 'last_operation': {'readonly': True}, + } + + _attribute_map = { + 'vm_size': {'key': 'vmSize', 'type': 'str'}, + 'subnet': {'key': 'subnet', 'type': 'ResourceId'}, + 'application_sharing_policy': {'key': 'applicationSharingPolicy', 'type': 'str'}, + 'ssh_settings': {'key': 'sshSettings', 'type': 'ComputeInstanceSshSettings'}, + 'connectivity_endpoints': {'key': 'connectivityEndpoints', 'type': 'ComputeInstanceConnectivityEndpoints'}, + 'applications': {'key': 'applications', 'type': '[ComputeInstanceApplication]'}, + 'created_by': {'key': 'createdBy', 'type': 'ComputeInstanceCreatedBy'}, + 'errors': {'key': 'errors', 'type': '[ErrorResponse]'}, + 'state': {'key': 'state', 'type': 'str'}, + 'compute_instance_authorization_type': {'key': 'computeInstanceAuthorizationType', 'type': 'str'}, + 'personal_compute_instance_settings': {'key': 'personalComputeInstanceSettings', 'type': 'PersonalComputeInstanceSettings'}, + 'setup_scripts': {'key': 'setupScripts', 'type': 'SetupScripts'}, + 'last_operation': {'key': 'lastOperation', 'type': 'ComputeInstanceLastOperation'}, + 'schedules': {'key': 'schedules', 'type': 'ComputeSchedules'}, + } + + def __init__(self, *, vm_size: str=None, subnet=None, application_sharing_policy="Shared", ssh_settings=None, compute_instance_authorization_type="personal", personal_compute_instance_settings=None, setup_scripts=None, schedules=None, **kwargs) -> None: + super(ComputeInstanceProperties, self).__init__(**kwargs) + self.vm_size = vm_size + self.subnet = subnet + 
self.application_sharing_policy = application_sharing_policy + self.ssh_settings = ssh_settings + self.connectivity_endpoints = None + self.applications = None + self.created_by = None + self.errors = None + self.state = None + self.compute_instance_authorization_type = compute_instance_authorization_type + self.personal_compute_instance_settings = personal_compute_instance_settings + self.setup_scripts = setup_scripts + self.last_operation = None + self.schedules = schedules + + +class ComputeInstanceSshSettings(Model): + """Specifies policy and settings for SSH access. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param ssh_public_access: Access policy for SSH. State of the public SSH + port. Possible values are: Disabled - Indicates that the public ssh port + is closed on this instance. Enabled - Indicates that the public ssh port + is open and accessible according to the VNet/subnet policy if applicable. + Possible values include: 'Enabled', 'Disabled'. Default value: "Disabled" + . + :type ssh_public_access: str or + ~azure.mgmt.machinelearningservices.models.SshPublicAccess + :ivar admin_user_name: Describes the admin user name. + :vartype admin_user_name: str + :ivar ssh_port: Describes the port for connecting through SSH. + :vartype ssh_port: int + :param admin_public_key: Specifies the SSH rsa public key file as a + string. Use "ssh-keygen -t rsa -b 2048" to generate your SSH key pairs. 
+ :type admin_public_key: str + """ + + _validation = { + 'admin_user_name': {'readonly': True}, + 'ssh_port': {'readonly': True}, + } + + _attribute_map = { + 'ssh_public_access': {'key': 'sshPublicAccess', 'type': 'str'}, + 'admin_user_name': {'key': 'adminUserName', 'type': 'str'}, + 'ssh_port': {'key': 'sshPort', 'type': 'int'}, + 'admin_public_key': {'key': 'adminPublicKey', 'type': 'str'}, + } + + def __init__(self, *, ssh_public_access="Disabled", admin_public_key: str=None, **kwargs) -> None: + super(ComputeInstanceSshSettings, self).__init__(**kwargs) + self.ssh_public_access = ssh_public_access + self.admin_user_name = None + self.ssh_port = None + self.admin_public_key = admin_public_key + + +class ComputeNodesInformation(Model): + """Compute nodes information related to a Machine Learning compute. Might + differ for every type of compute. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar next_link: The continuation token. + :vartype next_link: str + :param compute_type: Required. Constant filled by server. + :type compute_type: str + """ + + _validation = { + 'next_link': {'readonly': True}, + 'compute_type': {'required': True}, + } + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(ComputeNodesInformation, self).__init__(**kwargs) + self.next_link = None + self.compute_type = None + + +class ComputeResource(Resource): + """Machine Learning compute object wrapped into ARM resource envelope. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Fully qualified resource ID for the resource. 
Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param properties: Compute properties + :type properties: ~azure.mgmt.machinelearningservices.models.Compute + :param identity: The identity of the resource. + :type identity: ~azure.mgmt.machinelearningservices.models.Identity + :param location: Specifies the location of the resource. + :type location: str + :param tags: Contains resource tags defined as key/value pairs. + :type tags: dict[str, str] + :param sku: The sku of the workspace. + :type sku: ~azure.mgmt.machinelearningservices.models.Sku + :param system_data: + :type system_data: ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Compute'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, *, properties=None, identity=None, location: str=None, tags=None, sku=None, system_data=None, **kwargs) -> None: + super(ComputeResource, self).__init__(**kwargs) + self.properties = properties + self.identity = identity + self.location = location + self.tags = tags + self.sku = sku + self.system_data = system_data + + +class ComputeSchedules(Model): + """The list of schedules to be applied on the computes. 
+ + :param compute_start_stop: The list of compute start stop schedules to be + applied. + :type compute_start_stop: + list[~azure.mgmt.machinelearningservices.models.ComputeStartStopSchedule] + """ + + _attribute_map = { + 'compute_start_stop': {'key': 'computeStartStop', 'type': '[ComputeStartStopSchedule]'}, + } + + def __init__(self, *, compute_start_stop=None, **kwargs) -> None: + super(ComputeSchedules, self).__init__(**kwargs) + self.compute_start_stop = compute_start_stop + + +class ComputeStartStopSchedule(Model): + """Compute start stop schedule properties. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Schedule id. + :vartype id: str + :ivar provisioning_status: The current deployment state of schedule. + Possible values include: 'Completed', 'Provisioning', 'Failed' + :vartype provisioning_status: str or + ~azure.mgmt.machinelearningservices.models.ProvisioningStatus + :param status: Possible values include: 'Enabled', 'Disabled' + :type status: str or + ~azure.mgmt.machinelearningservices.models.ScheduleStatus + :param trigger_type: Possible values include: 'Recurrence', 'Cron' + :type trigger_type: str or + ~azure.mgmt.machinelearningservices.models.TriggerType + :param action: Possible values include: 'Start', 'Stop' + :type action: str or + ~azure.mgmt.machinelearningservices.models.ComputePowerAction + :param recurrence: + :type recurrence: ~azure.mgmt.machinelearningservices.models.Recurrence + :param cron: + :type cron: ~azure.mgmt.machinelearningservices.models.Cron + """ + + _validation = { + 'id': {'readonly': True}, + 'provisioning_status': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'provisioning_status': {'key': 'provisioningStatus', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'trigger_type': {'key': 'triggerType', 'type': 'str'}, + 'action': {'key': 'action', 'type': 'str'}, + 'recurrence': {'key': 'recurrence', 
'type': 'Recurrence'}, + 'cron': {'key': 'cron', 'type': 'Cron'}, + } + + def __init__(self, *, status=None, trigger_type=None, action=None, recurrence=None, cron=None, **kwargs) -> None: + super(ComputeStartStopSchedule, self).__init__(**kwargs) + self.id = None + self.provisioning_status = None + self.status = status + self.trigger_type = trigger_type + self.action = action + self.recurrence = recurrence + self.cron = cron + + +class ContainerResourceRequirements(Model): + """The resource requirements for the container (cpu and memory). + + :param cpu: The minimum amount of CPU cores to be used by the container. + More info: + https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/ + :type cpu: float + :param cpu_limit: The maximum amount of CPU cores allowed to be used by + the container. More info: + https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/ + :type cpu_limit: float + :param memory_in_gb: The minimum amount of memory (in GB) to be used by + the container. More info: + https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/ + :type memory_in_gb: float + :param memory_in_gb_limit: The maximum amount of memory (in GB) allowed to + be used by the container. More info: + https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/ + :type memory_in_gb_limit: float + :param gpu: The number of GPU cores in the container. + :type gpu: int + :param fpga: The number of FPGA PCIE devices exposed to the container. + Must be multiple of 2. 
+ :type fpga: int + """ + + _attribute_map = { + 'cpu': {'key': 'cpu', 'type': 'float'}, + 'cpu_limit': {'key': 'cpuLimit', 'type': 'float'}, + 'memory_in_gb': {'key': 'memoryInGB', 'type': 'float'}, + 'memory_in_gb_limit': {'key': 'memoryInGBLimit', 'type': 'float'}, + 'gpu': {'key': 'gpu', 'type': 'int'}, + 'fpga': {'key': 'fpga', 'type': 'int'}, + } + + def __init__(self, *, cpu: float=None, cpu_limit: float=None, memory_in_gb: float=None, memory_in_gb_limit: float=None, gpu: int=None, fpga: int=None, **kwargs) -> None: + super(ContainerResourceRequirements, self).__init__(**kwargs) + self.cpu = cpu + self.cpu_limit = cpu_limit + self.memory_in_gb = memory_in_gb + self.memory_in_gb_limit = memory_in_gb_limit + self.gpu = gpu + self.fpga = fpga + + +class CosmosDbSettings(Model): + """CosmosDbSettings. + + :param collections_throughput: The throughput of the collections in + cosmosdb database + :type collections_throughput: int + """ + + _attribute_map = { + 'collections_throughput': {'key': 'collectionsThroughput', 'type': 'int'}, + } + + def __init__(self, *, collections_throughput: int=None, **kwargs) -> None: + super(CosmosDbSettings, self).__init__(**kwargs) + self.collections_throughput = collections_throughput + + +class Cron(Model): + """The workflow trigger cron for ComputeStartStop schedule type. + + :param start_time: The start time. + :type start_time: str + :param time_zone: The time zone. + :type time_zone: str + :param expression: The cron expression. 
+ :type expression: str + """ + + _attribute_map = { + 'start_time': {'key': 'startTime', 'type': 'str'}, + 'time_zone': {'key': 'timeZone', 'type': 'str'}, + 'expression': {'key': 'expression', 'type': 'str'}, + } + + def __init__(self, *, start_time: str=None, time_zone: str=None, expression: str=None, **kwargs) -> None: + super(Cron, self).__init__(**kwargs) + self.start_time = start_time + self.time_zone = time_zone + self.expression = expression + + +class CsvExportSummary(ExportSummary): + """CsvExportSummary. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar end_time_utc: The time when the export was completed. + :vartype end_time_utc: datetime + :ivar exported_row_count: The total number of labeled datapoints exported. + :vartype exported_row_count: long + :ivar labeling_job_id: Name and identifier of the job containing exported + labels. + :vartype labeling_job_id: str + :ivar start_time_utc: The time when the export was requested. + :vartype start_time_utc: datetime + :param format: Required. Constant filled by server. + :type format: str + :ivar container_name: The container name to which the labels will be + exported. + :vartype container_name: str + :ivar snapshot_path: The output path where the labels will be exported. 
+ :vartype snapshot_path: str + """ + + _validation = { + 'end_time_utc': {'readonly': True}, + 'exported_row_count': {'readonly': True}, + 'labeling_job_id': {'readonly': True}, + 'start_time_utc': {'readonly': True}, + 'format': {'required': True}, + 'container_name': {'readonly': True}, + 'snapshot_path': {'readonly': True}, + } + + _attribute_map = { + 'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'}, + 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'}, + 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'}, + 'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'}, + 'format': {'key': 'format', 'type': 'str'}, + 'container_name': {'key': 'containerName', 'type': 'str'}, + 'snapshot_path': {'key': 'snapshotPath', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(CsvExportSummary, self).__init__(**kwargs) + self.container_name = None + self.snapshot_path = None + self.format = 'CSV' + + +class Databricks(Compute): + """A Databricks compute. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_location: Location for the underlying compute + :type compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values + are Unknown, Updating, Provisioning, Succeeded, and Failed. Possible + values include: 'Unknown', 'Updating', 'Creating', 'Deleting', + 'Succeeded', 'Failed', 'Canceled' + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ProvisioningState + :param description: The description of the Machine Learning compute. + :type description: str + :ivar created_on: The time at which the compute was created. + :vartype created_on: datetime + :ivar modified_on: The time at which the compute was last modified. 
+ :vartype modified_on: datetime + :param resource_id: ARM resource id of the underlying compute + :type resource_id: str + :ivar provisioning_errors: Errors during provisioning + :vartype provisioning_errors: + list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar is_attached_compute: Indicating whether the compute was provisioned + by user and brought from outside if true, or machine learning service + provisioned it if false. + :vartype is_attached_compute: bool + :param disable_local_auth: Opt-out of local authentication and ensure + customers can use only MSI and AAD exclusively for authentication. + :type disable_local_auth: bool + :param compute_type: Required. Constant filled by server. + :type compute_type: str + :param properties: + :type properties: + ~azure.mgmt.machinelearningservices.models.DatabricksProperties + """ + + _validation = { + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + 'compute_type': {'required': True}, + } + + _attribute_map = { + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'DatabricksProperties'}, + } + + def __init__(self, *, compute_location: str=None, description: str=None, resource_id: str=None, disable_local_auth: 
bool=None, properties=None, **kwargs) -> None: + super(Databricks, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs) + self.properties = properties + self.compute_type = 'Databricks' + + +class DatabricksComputeSecrets(ComputeSecrets): + """Secrets related to a Machine Learning compute based on Databricks. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. Constant filled by server. + :type compute_type: str + :param databricks_access_token: access token for databricks account. + :type databricks_access_token: str + """ + + _validation = { + 'compute_type': {'required': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, + } + + def __init__(self, *, databricks_access_token: str=None, **kwargs) -> None: + super(DatabricksComputeSecrets, self).__init__(**kwargs) + self.databricks_access_token = databricks_access_token + self.compute_type = 'Databricks' + + +class DatabricksProperties(Model): + """DatabricksProperties. + + :param databricks_access_token: Databricks access token + :type databricks_access_token: str + :param workspace_url: Workspace Url + :type workspace_url: str + """ + + _attribute_map = { + 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, + 'workspace_url': {'key': 'workspaceUrl', 'type': 'str'}, + } + + def __init__(self, *, databricks_access_token: str=None, workspace_url: str=None, **kwargs) -> None: + super(DatabricksProperties, self).__init__(**kwargs) + self.databricks_access_token = databricks_access_token + self.workspace_url = workspace_url + + +class DataContainer(Model): + """Container for data asset versions. + + :param description: The asset description text. + :type description: str + :param properties: The asset property dictionary. 
+ :type properties: dict[str, str] + :param tags: Tag dictionary. Tags can be added, removed, and updated. + :type tags: dict[str, str] + """ + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, *, description: str=None, properties=None, tags=None, **kwargs) -> None: + super(DataContainer, self).__init__(**kwargs) + self.description = description + self.properties = properties + self.tags = tags + + +class DataContainerResource(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param properties: Required. Additional attributes of the entity. 
+ :type properties: ~azure.mgmt.machinelearningservices.models.DataContainer + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'DataContainer'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(DataContainerResource, self).__init__(**kwargs) + self.properties = properties + self.system_data = None + + +class DataFactory(Compute): + """A DataFactory compute. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_location: Location for the underlying compute + :type compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values + are Unknown, Updating, Provisioning, Succeeded, and Failed. Possible + values include: 'Unknown', 'Updating', 'Creating', 'Deleting', + 'Succeeded', 'Failed', 'Canceled' + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ProvisioningState + :param description: The description of the Machine Learning compute. + :type description: str + :ivar created_on: The time at which the compute was created. + :vartype created_on: datetime + :ivar modified_on: The time at which the compute was last modified. 
+ :vartype modified_on: datetime + :param resource_id: ARM resource id of the underlying compute + :type resource_id: str + :ivar provisioning_errors: Errors during provisioning + :vartype provisioning_errors: + list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar is_attached_compute: Indicating whether the compute was provisioned + by user and brought from outside if true, or machine learning service + provisioned it if false. + :vartype is_attached_compute: bool + :param disable_local_auth: Opt-out of local authentication and ensure + customers can use only MSI and AAD exclusively for authentication. + :type disable_local_auth: bool + :param compute_type: Required. Constant filled by server. + :type compute_type: str + """ + + _validation = { + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + 'compute_type': {'required': True}, + } + + _attribute_map = { + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + } + + def __init__(self, *, compute_location: str=None, description: str=None, resource_id: str=None, disable_local_auth: bool=None, **kwargs) -> None: + super(DataFactory, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, 
disable_local_auth=disable_local_auth, **kwargs) + self.compute_type = 'DataFactory' + + +class DataLakeAnalytics(Compute): + """A DataLakeAnalytics compute. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_location: Location for the underlying compute + :type compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values + are Unknown, Updating, Provisioning, Succeeded, and Failed. Possible + values include: 'Unknown', 'Updating', 'Creating', 'Deleting', + 'Succeeded', 'Failed', 'Canceled' + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ProvisioningState + :param description: The description of the Machine Learning compute. + :type description: str + :ivar created_on: The time at which the compute was created. + :vartype created_on: datetime + :ivar modified_on: The time at which the compute was last modified. + :vartype modified_on: datetime + :param resource_id: ARM resource id of the underlying compute + :type resource_id: str + :ivar provisioning_errors: Errors during provisioning + :vartype provisioning_errors: + list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar is_attached_compute: Indicating whether the compute was provisioned + by user and brought from outside if true, or machine learning service + provisioned it if false. + :vartype is_attached_compute: bool + :param disable_local_auth: Opt-out of local authentication and ensure + customers can use only MSI and AAD exclusively for authentication. + :type disable_local_auth: bool + :param compute_type: Required. Constant filled by server. 
+ :type compute_type: str + :param properties: + :type properties: + ~azure.mgmt.machinelearningservices.models.DataLakeAnalyticsProperties + """ + + _validation = { + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + 'compute_type': {'required': True}, + } + + _attribute_map = { + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'DataLakeAnalyticsProperties'}, + } + + def __init__(self, *, compute_location: str=None, description: str=None, resource_id: str=None, disable_local_auth: bool=None, properties=None, **kwargs) -> None: + super(DataLakeAnalytics, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs) + self.properties = properties + self.compute_type = 'DataLakeAnalytics' + + +class DataLakeAnalyticsProperties(Model): + """DataLakeAnalyticsProperties. 
+ + :param data_lake_store_account_name: DataLake Store Account Name + :type data_lake_store_account_name: str + """ + + _attribute_map = { + 'data_lake_store_account_name': {'key': 'dataLakeStoreAccountName', 'type': 'str'}, + } + + def __init__(self, *, data_lake_store_account_name: str=None, **kwargs) -> None: + super(DataLakeAnalyticsProperties, self).__init__(**kwargs) + self.data_lake_store_account_name = data_lake_store_account_name + + +class DataPathAssetReference(AssetReferenceBase): + """Reference to an asset via its path in a datastore. + + All required parameters must be populated in order to send to Azure. + + :param reference_type: Required. Constant filled by server. + :type reference_type: str + :param datastore_id: ARM resource ID of the datastore where the asset is + located. + :type datastore_id: str + :param path: The path of the file/directory in the datastore. + :type path: str + """ + + _validation = { + 'reference_type': {'required': True}, + } + + _attribute_map = { + 'reference_type': {'key': 'referenceType', 'type': 'str'}, + 'datastore_id': {'key': 'datastoreId', 'type': 'str'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__(self, *, datastore_id: str=None, path: str=None, **kwargs) -> None: + super(DataPathAssetReference, self).__init__(**kwargs) + self.datastore_id = datastore_id + self.path = path + self.reference_type = 'DataPath' + + +class DatasetExportSummary(ExportSummary): + """DatasetExportSummary. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar end_time_utc: The time when the export was completed. + :vartype end_time_utc: datetime + :ivar exported_row_count: The total number of labeled datapoints exported. + :vartype exported_row_count: long + :ivar labeling_job_id: Name and identifier of the job containing exported + labels. 
+ :vartype labeling_job_id: str + :ivar start_time_utc: The time when the export was requested. + :vartype start_time_utc: datetime + :param format: Required. Constant filled by server. + :type format: str + :ivar labeled_asset_name: The unique name of the labeled data asset. + :vartype labeled_asset_name: str + """ + + _validation = { + 'end_time_utc': {'readonly': True}, + 'exported_row_count': {'readonly': True}, + 'labeling_job_id': {'readonly': True}, + 'start_time_utc': {'readonly': True}, + 'format': {'required': True}, + 'labeled_asset_name': {'readonly': True}, + } + + _attribute_map = { + 'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'}, + 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'}, + 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'}, + 'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'}, + 'format': {'key': 'format', 'type': 'str'}, + 'labeled_asset_name': {'key': 'labeledAssetName', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(DatasetExportSummary, self).__init__(**kwargs) + self.labeled_asset_name = None + self.format = 'Dataset' + + +class DatastoreProperties(Model): + """Datastore definition. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param contents: Required. Reference to the datastore storage contents. + :type contents: + ~azure.mgmt.machinelearningservices.models.DatastoreContents + :param description: The asset description text. + :type description: str + :ivar has_been_validated: Whether the service has validated access to the + datastore with the provided credentials. + :vartype has_been_validated: bool + :param is_default: Whether this datastore is the default for the + workspace. + :type is_default: bool + :param linked_info: Information about the datastore origin, if linked. 
+ :type linked_info: ~azure.mgmt.machinelearningservices.models.LinkedInfo + :param properties: The asset property dictionary. + :type properties: dict[str, str] + :param tags: Tag dictionary. Tags can be added, removed, and updated. + :type tags: dict[str, str] + """ + + _validation = { + 'contents': {'required': True}, + 'has_been_validated': {'readonly': True}, + } + + _attribute_map = { + 'contents': {'key': 'contents', 'type': 'DatastoreContents'}, + 'description': {'key': 'description', 'type': 'str'}, + 'has_been_validated': {'key': 'hasBeenValidated', 'type': 'bool'}, + 'is_default': {'key': 'isDefault', 'type': 'bool'}, + 'linked_info': {'key': 'linkedInfo', 'type': 'LinkedInfo'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, *, contents, description: str=None, is_default: bool=None, linked_info=None, properties=None, tags=None, **kwargs) -> None: + super(DatastoreProperties, self).__init__(**kwargs) + self.contents = contents + self.description = description + self.has_been_validated = None + self.is_default = is_default + self.linked_info = linked_info + self.properties = properties + self.tags = tags + + +class DatastorePropertiesResource(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param properties: Required. Additional attributes of the entity. 
+ :type properties: + ~azure.mgmt.machinelearningservices.models.DatastoreProperties + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'DatastoreProperties'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(DatastorePropertiesResource, self).__init__(**kwargs) + self.properties = properties + self.system_data = None + + +class DataVersion(Model): + """Data asset version details. + + All required parameters must be populated in order to send to Azure. + + :param dataset_type: The Format of dataset. Possible values include: + 'Simple', 'Dataflow' + :type dataset_type: str or + ~azure.mgmt.machinelearningservices.models.DatasetType + :param datastore_id: ARM resource ID of the datastore where the asset is + located. + :type datastore_id: str + :param description: The asset description text. + :type description: str + :param is_anonymous: If the name version are system generated (anonymous + registration). + :type is_anonymous: bool + :param path: Required. The path of the file/directory in the datastore. + :type path: str + :param properties: The asset property dictionary. + :type properties: dict[str, str] + :param tags: Tag dictionary. Tags can be added, removed, and updated. 
+ :type tags: dict[str, str] + """ + + _validation = { + 'path': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'dataset_type': {'key': 'datasetType', 'type': 'str'}, + 'datastore_id': {'key': 'datastoreId', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, + 'path': {'key': 'path', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, *, path: str, dataset_type=None, datastore_id: str=None, description: str=None, is_anonymous: bool=None, properties=None, tags=None, **kwargs) -> None: + super(DataVersion, self).__init__(**kwargs) + self.dataset_type = dataset_type + self.datastore_id = datastore_id + self.description = description + self.is_anonymous = is_anonymous + self.path = path + self.properties = properties + self.tags = tags + + +class DataVersionResource(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param properties: Required. Additional attributes of the entity. 
+ :type properties: ~azure.mgmt.machinelearningservices.models.DataVersion + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'DataVersion'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(DataVersionResource, self).__init__(**kwargs) + self.properties = properties + self.system_data = None + + +class DeploymentLogs(Model): + """DeploymentLogs. + + :param content: The retrieved online deployment logs. + :type content: str + """ + + _attribute_map = { + 'content': {'key': 'content', 'type': 'str'}, + } + + def __init__(self, *, content: str=None, **kwargs) -> None: + super(DeploymentLogs, self).__init__(**kwargs) + self.content = content + + +class DeploymentLogsRequest(Model): + """DeploymentLogsRequest. + + :param container_type: The type of container to retrieve logs from. + Possible values include: 'StorageInitializer', 'InferenceServer' + :type container_type: str or + ~azure.mgmt.machinelearningservices.models.ContainerType + :param tail: The maximum number of lines to tail. 
+ :type tail: int + """ + + _attribute_map = { + 'container_type': {'key': 'containerType', 'type': 'str'}, + 'tail': {'key': 'tail', 'type': 'int'}, + } + + def __init__(self, *, container_type=None, tail: int=None, **kwargs) -> None: + super(DeploymentLogsRequest, self).__init__(**kwargs) + self.container_type = container_type + self.tail = tail + + +class DistributionConfiguration(Model): + """Base definition for job distribution configuration. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: Mpi, PyTorch, TensorFlow + + All required parameters must be populated in order to send to Azure. + + :param distribution_type: Required. Constant filled by server. + :type distribution_type: str + """ + + _validation = { + 'distribution_type': {'required': True}, + } + + _attribute_map = { + 'distribution_type': {'key': 'distributionType', 'type': 'str'}, + } + + _subtype_map = { + 'distribution_type': {'Mpi': 'Mpi', 'PyTorch': 'PyTorch', 'TensorFlow': 'TensorFlow'} + } + + def __init__(self, **kwargs) -> None: + super(DistributionConfiguration, self).__init__(**kwargs) + self.distribution_type = None + + +class DockerSpecification(Model): + """Configuration settings for Docker. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: DockerBuild, DockerImage + + All required parameters must be populated in order to send to Azure. + + :param platform: The platform information of the docker image. + :type platform: + ~azure.mgmt.machinelearningservices.models.DockerImagePlatform + :param docker_specification_type: Required. Constant filled by server. 
+ :type docker_specification_type: str + """ + + _validation = { + 'docker_specification_type': {'required': True}, + } + + _attribute_map = { + 'platform': {'key': 'platform', 'type': 'DockerImagePlatform'}, + 'docker_specification_type': {'key': 'dockerSpecificationType', 'type': 'str'}, + } + + _subtype_map = { + 'docker_specification_type': {'Build': 'DockerBuild', 'Image': 'DockerImage'} + } + + def __init__(self, *, platform=None, **kwargs) -> None: + super(DockerSpecification, self).__init__(**kwargs) + self.platform = platform + self.docker_specification_type = None + + +class DockerBuild(DockerSpecification): + """Class to represent configuration settings for Docker Build. + + All required parameters must be populated in order to send to Azure. + + :param platform: The platform information of the docker image. + :type platform: + ~azure.mgmt.machinelearningservices.models.DockerImagePlatform + :param docker_specification_type: Required. Constant filled by server. + :type docker_specification_type: str + :param context: Path to a snapshot of the Docker Context. This property is + only valid if Dockerfile is specified. + The path is relative to the asset path which must contain a single Blob + URI value. + + :type context: str + :param dockerfile: Required. Docker command line instructions to assemble + an image. 
+ + :type dockerfile: str + """ + + _validation = { + 'docker_specification_type': {'required': True}, + 'dockerfile': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'platform': {'key': 'platform', 'type': 'DockerImagePlatform'}, + 'docker_specification_type': {'key': 'dockerSpecificationType', 'type': 'str'}, + 'context': {'key': 'context', 'type': 'str'}, + 'dockerfile': {'key': 'dockerfile', 'type': 'str'}, + } + + def __init__(self, *, dockerfile: str, platform=None, context: str=None, **kwargs) -> None: + super(DockerBuild, self).__init__(platform=platform, **kwargs) + self.context = context + self.dockerfile = dockerfile + self.docker_specification_type = 'Build' + + +class DockerImage(DockerSpecification): + """Class to represent configuration settings for Docker Build. + + All required parameters must be populated in order to send to Azure. + + :param platform: The platform information of the docker image. + :type platform: + ~azure.mgmt.machinelearningservices.models.DockerImagePlatform + :param docker_specification_type: Required. Constant filled by server. + :type docker_specification_type: str + :param docker_image_uri: Required. Image name of a custom base image. + + :type docker_image_uri: str + """ + + _validation = { + 'docker_specification_type': {'required': True}, + 'docker_image_uri': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'platform': {'key': 'platform', 'type': 'DockerImagePlatform'}, + 'docker_specification_type': {'key': 'dockerSpecificationType', 'type': 'str'}, + 'docker_image_uri': {'key': 'dockerImageUri', 'type': 'str'}, + } + + def __init__(self, *, docker_image_uri: str, platform=None, **kwargs) -> None: + super(DockerImage, self).__init__(platform=platform, **kwargs) + self.docker_image_uri = docker_image_uri + self.docker_specification_type = 'Image' + + +class DockerImagePlatform(Model): + """DockerImagePlatform. 
+ + :param operating_system_type: The OS type the Environment. Possible values + include: 'Linux', 'Windows' + :type operating_system_type: str or + ~azure.mgmt.machinelearningservices.models.OperatingSystemType + """ + + _attribute_map = { + 'operating_system_type': {'key': 'operatingSystemType', 'type': 'str'}, + } + + def __init__(self, *, operating_system_type=None, **kwargs) -> None: + super(DockerImagePlatform, self).__init__(**kwargs) + self.operating_system_type = operating_system_type + + +class EncryptionProperty(Model): + """EncryptionProperty. + + All required parameters must be populated in order to send to Azure. + + :param status: Required. Indicates whether or not the encryption is + enabled for the workspace. Possible values include: 'Enabled', 'Disabled' + :type status: str or + ~azure.mgmt.machinelearningservices.models.EncryptionStatus + :param identity: The identity that will be used to access the key vault + for encryption at rest. + :type identity: ~azure.mgmt.machinelearningservices.models.IdentityForCmk + :param key_vault_properties: Required. Customer Key vault properties. + :type key_vault_properties: + ~azure.mgmt.machinelearningservices.models.KeyVaultProperties + """ + + _validation = { + 'status': {'required': True}, + 'key_vault_properties': {'required': True}, + } + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'IdentityForCmk'}, + 'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'KeyVaultProperties'}, + } + + def __init__(self, *, status, key_vault_properties, identity=None, **kwargs) -> None: + super(EncryptionProperty, self).__init__(**kwargs) + self.status = status + self.identity = identity + self.key_vault_properties = key_vault_properties + + +class EndpointAuthKeys(Model): + """Keys for endpoint authentication. + + :param primary_key: The primary key. + :type primary_key: str + :param secondary_key: The secondary key. 
+ :type secondary_key: str + """ + + _attribute_map = { + 'primary_key': {'key': 'primaryKey', 'type': 'str'}, + 'secondary_key': {'key': 'secondaryKey', 'type': 'str'}, + } + + def __init__(self, *, primary_key: str=None, secondary_key: str=None, **kwargs) -> None: + super(EndpointAuthKeys, self).__init__(**kwargs) + self.primary_key = primary_key + self.secondary_key = secondary_key + + +class EndpointAuthToken(Model): + """Service Token. + + :param access_token: Access token. + :type access_token: str + :param expiry_time_utc: Access token expiry time (UTC). + :type expiry_time_utc: long + :param refresh_after_time_utc: Refresh access token after time (UTC). + :type refresh_after_time_utc: long + :param token_type: Access token type. + :type token_type: str + """ + + _attribute_map = { + 'access_token': {'key': 'accessToken', 'type': 'str'}, + 'expiry_time_utc': {'key': 'expiryTimeUtc', 'type': 'long'}, + 'refresh_after_time_utc': {'key': 'refreshAfterTimeUtc', 'type': 'long'}, + 'token_type': {'key': 'tokenType', 'type': 'str'}, + } + + def __init__(self, *, access_token: str=None, expiry_time_utc: int=None, refresh_after_time_utc: int=None, token_type: str=None, **kwargs) -> None: + super(EndpointAuthToken, self).__init__(**kwargs) + self.access_token = access_token + self.expiry_time_utc = expiry_time_utc + self.refresh_after_time_utc = refresh_after_time_utc + self.token_type = token_type + + +class EnvironmentContainer(Model): + """Container for environment specification versions. + + :param description: The asset description text. + :type description: str + :param properties: The asset property dictionary. + :type properties: dict[str, str] + :param tags: Tag dictionary. Tags can be added, removed, and updated. 
+ :type tags: dict[str, str] + """ + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, *, description: str=None, properties=None, tags=None, **kwargs) -> None: + super(EnvironmentContainer, self).__init__(**kwargs) + self.description = description + self.properties = properties + self.tags = tags + + +class EnvironmentContainerResource(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param properties: Required. Additional attributes of the entity. 
+ :type properties: + ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'EnvironmentContainer'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(EnvironmentContainerResource, self).__init__(**kwargs) + self.properties = properties + self.system_data = None + + +class EnvironmentSpecificationVersion(Model): + """Environment specification version details. + . + + Variables are only populated by the server, and will be ignored when + sending a request. + + :param conda_file: Standard configuration file used by Conda that lets you + install any kind of package, including Python, R, and C/C++ packages. + + :type conda_file: str + :param description: The asset description text. + :type description: str + :param docker: Configuration settings for Docker. + :type docker: + ~azure.mgmt.machinelearningservices.models.DockerSpecification + :ivar environment_specification_type: Environment specification is either + user managed or curated by the Azure ML service + . Possible values include: 'Curated', 'UserCreated' + :vartype environment_specification_type: str or + ~azure.mgmt.machinelearningservices.models.EnvironmentSpecificationType + :param inference_container_properties: Defines configuration specific to + inference. 
+ :type inference_container_properties: + ~azure.mgmt.machinelearningservices.models.InferenceContainerProperties + :param is_anonymous: If the name version are system generated (anonymous + registration). + :type is_anonymous: bool + :param properties: The asset property dictionary. + :type properties: dict[str, str] + :param tags: Tag dictionary. Tags can be added, removed, and updated. + :type tags: dict[str, str] + """ + + _validation = { + 'environment_specification_type': {'readonly': True}, + } + + _attribute_map = { + 'conda_file': {'key': 'condaFile', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'docker': {'key': 'docker', 'type': 'DockerSpecification'}, + 'environment_specification_type': {'key': 'environmentSpecificationType', 'type': 'str'}, + 'inference_container_properties': {'key': 'inferenceContainerProperties', 'type': 'InferenceContainerProperties'}, + 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, *, conda_file: str=None, description: str=None, docker=None, inference_container_properties=None, is_anonymous: bool=None, properties=None, tags=None, **kwargs) -> None: + super(EnvironmentSpecificationVersion, self).__init__(**kwargs) + self.conda_file = conda_file + self.description = description + self.docker = docker + self.environment_specification_type = None + self.inference_container_properties = inference_container_properties + self.is_anonymous = is_anonymous + self.properties = properties + self.tags = tags + + +class EnvironmentSpecificationVersionResource(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. 
Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param properties: Required. Additional attributes of the entity. + :type properties: + ~azure.mgmt.machinelearningservices.models.EnvironmentSpecificationVersion + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'EnvironmentSpecificationVersion'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(EnvironmentSpecificationVersionResource, self).__init__(**kwargs) + self.properties = properties + self.system_data = None + + +class ErrorAdditionalInfo(Model): + """The resource management error additional info. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar type: The additional info type. + :vartype type: str + :ivar info: The additional info. 
+ :vartype info: object + """ + + _validation = { + 'type': {'readonly': True}, + 'info': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'info': {'key': 'info', 'type': 'object'}, + } + + def __init__(self, **kwargs) -> None: + super(ErrorAdditionalInfo, self).__init__(**kwargs) + self.type = None + self.info = None + + +class ErrorDetail(Model): + """The error detail. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar code: The error code. + :vartype code: str + :ivar message: The error message. + :vartype message: str + :ivar target: The error target. + :vartype target: str + :ivar details: The error details. + :vartype details: + list[~azure.mgmt.machinelearningservices.models.ErrorDetail] + :ivar additional_info: The error additional info. + :vartype additional_info: + list[~azure.mgmt.machinelearningservices.models.ErrorAdditionalInfo] + """ + + _validation = { + 'code': {'readonly': True}, + 'message': {'readonly': True}, + 'target': {'readonly': True}, + 'details': {'readonly': True}, + 'additional_info': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ErrorDetail]'}, + 'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'}, + } + + def __init__(self, **kwargs) -> None: + super(ErrorDetail, self).__init__(**kwargs) + self.code = None + self.message = None + self.target = None + self.details = None + self.additional_info = None + + +class ErrorResponse(Model): + """Error response. + + Common error response for all Azure Resource Manager APIs to return error + details for failed operations. (This also follows the OData error response + format.). + + :param error: The error object. 
+ :type error: ~azure.mgmt.machinelearningservices.models.ErrorDetail
+ """
+
+ _attribute_map = {
+ 'error': {'key': 'error', 'type': 'ErrorDetail'},
+ }
+
+ def __init__(self, *, error=None, **kwargs) -> None:
+ super(ErrorResponse, self).__init__(**kwargs)
+ self.error = error
+
+
+class ErrorResponseException(HttpOperationError):
+ """Server responded with exception of type: 'ErrorResponse'.
+
+ :param deserialize: A deserializer
+ :param response: Server response to be deserialized.
+ """
+
+ def __init__(self, deserialize, response, *args):
+
+ super(ErrorResponseException, self).__init__(deserialize, response, 'ErrorResponse', *args)
+
+
+class EstimatedVMPrice(Model):
+ """The estimated price info for using a VM of a particular OS type, tier, etc.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param retail_price: Required. Retail price. The price charged for using
+ the VM.
+ :type retail_price: float
+ :param os_type: Required. OS type. Operating system type used by the VM.
+ Possible values include: 'Linux', 'Windows'
+ :type os_type: str or
+ ~azure.mgmt.machinelearningservices.models.VMPriceOSType
+ :param vm_tier: Required. VM tier. The type of the VM. Possible values
+ include: 'Standard', 'LowPriority', 'Spot'
+ :type vm_tier: str or ~azure.mgmt.machinelearningservices.models.VMTier
+ """
+
+ _validation = {
+ 'retail_price': {'required': True},
+ 'os_type': {'required': True},
+ 'vm_tier': {'required': True},
+ }
+
+ _attribute_map = {
+ 'retail_price': {'key': 'retailPrice', 'type': 'float'},
+ 'os_type': {'key': 'osType', 'type': 'str'},
+ 'vm_tier': {'key': 'vmTier', 'type': 'str'},
+ }
+
+ def __init__(self, *, retail_price: float, os_type, vm_tier, **kwargs) -> None:
+ super(EstimatedVMPrice, self).__init__(**kwargs)
+ self.retail_price = retail_price
+ self.os_type = os_type
+ self.vm_tier = vm_tier
+
+
+class EstimatedVMPrices(Model):
+ """The estimated price info for using a VM.
+ + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar billing_currency: Required. Billing currency. Three lettered code + specifying the currency of the VM price. Example: USD. Default value: + "USD" . + :vartype billing_currency: str + :ivar unit_of_measure: Required. Unit of time measure. The unit of time + measurement for the specified VM price. Example: OneHour. Default value: + "OneHour" . + :vartype unit_of_measure: str + :param values: Required. List of estimated VM prices. The list of + estimated prices for using a VM of a particular OS type, tier, etc. + :type values: + list[~azure.mgmt.machinelearningservices.models.EstimatedVMPrice] + """ + + _validation = { + 'billing_currency': {'required': True, 'constant': True}, + 'unit_of_measure': {'required': True, 'constant': True}, + 'values': {'required': True}, + } + + _attribute_map = { + 'billing_currency': {'key': 'billingCurrency', 'type': 'str'}, + 'unit_of_measure': {'key': 'unitOfMeasure', 'type': 'str'}, + 'values': {'key': 'values', 'type': '[EstimatedVMPrice]'}, + } + + billing_currency = "USD" + + unit_of_measure = "OneHour" + + def __init__(self, *, values, **kwargs) -> None: + super(EstimatedVMPrices, self).__init__(**kwargs) + self.values = values + + +class FlavorData(Model): + """FlavorData. + + :param data: Model flavor-specific data. + :type data: dict[str, str] + """ + + _attribute_map = { + 'data': {'key': 'data', 'type': '{str}'}, + } + + def __init__(self, *, data=None, **kwargs) -> None: + super(FlavorData, self).__init__(**kwargs) + self.data = data + + +class GlusterFsContents(DatastoreContents): + """GlusterFs datastore configuration. + + All required parameters must be populated in order to send to Azure. + + :param contents_type: Required. Constant filled by server. + :type contents_type: str + :param server_address: Required. 
GlusterFS server address (can be the IP + address or server name). + :type server_address: str + :param volume_name: Required. GlusterFS volume name. + :type volume_name: str + """ + + _validation = { + 'contents_type': {'required': True}, + 'server_address': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'volume_name': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'contents_type': {'key': 'contentsType', 'type': 'str'}, + 'server_address': {'key': 'serverAddress', 'type': 'str'}, + 'volume_name': {'key': 'volumeName', 'type': 'str'}, + } + + def __init__(self, *, server_address: str, volume_name: str, **kwargs) -> None: + super(GlusterFsContents, self).__init__(**kwargs) + self.server_address = server_address + self.volume_name = volume_name + self.contents_type = 'GlusterFs' + + +class HDInsight(Compute): + """A HDInsight compute. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_location: Location for the underlying compute + :type compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values + are Unknown, Updating, Provisioning, Succeeded, and Failed. Possible + values include: 'Unknown', 'Updating', 'Creating', 'Deleting', + 'Succeeded', 'Failed', 'Canceled' + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.ProvisioningState + :param description: The description of the Machine Learning compute. + :type description: str + :ivar created_on: The time at which the compute was created. + :vartype created_on: datetime + :ivar modified_on: The time at which the compute was last modified. 
+ :vartype modified_on: datetime + :param resource_id: ARM resource id of the underlying compute + :type resource_id: str + :ivar provisioning_errors: Errors during provisioning + :vartype provisioning_errors: + list[~azure.mgmt.machinelearningservices.models.ErrorResponse] + :ivar is_attached_compute: Indicating whether the compute was provisioned + by user and brought from outside if true, or machine learning service + provisioned it if false. + :vartype is_attached_compute: bool + :param disable_local_auth: Opt-out of local authentication and ensure + customers can use only MSI and AAD exclusively for authentication. + :type disable_local_auth: bool + :param compute_type: Required. Constant filled by server. + :type compute_type: str + :param properties: + :type properties: + ~azure.mgmt.machinelearningservices.models.HDInsightProperties + """ + + _validation = { + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + 'compute_type': {'required': True}, + } + + _attribute_map = { + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'HDInsightProperties'}, + } + + def __init__(self, *, compute_location: str=None, description: str=None, resource_id: str=None, disable_local_auth: bool=None, 
properties=None, **kwargs) -> None: + super(HDInsight, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs) + self.properties = properties + self.compute_type = 'HDInsight' + + +class HDInsightProperties(Model): + """HDInsightProperties. + + :param ssh_port: Port open for ssh connections on the master node of the + cluster. + :type ssh_port: int + :param address: Public IP address of the master node of the cluster. + :type address: str + :param administrator_account: Admin credentials for master node of the + cluster + :type administrator_account: + ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials + """ + + _attribute_map = { + 'ssh_port': {'key': 'sshPort', 'type': 'int'}, + 'address': {'key': 'address', 'type': 'str'}, + 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, + } + + def __init__(self, *, ssh_port: int=None, address: str=None, administrator_account=None, **kwargs) -> None: + super(HDInsightProperties, self).__init__(**kwargs) + self.ssh_port = ssh_port + self.address = address + self.administrator_account = administrator_account + + +class IdAssetReference(AssetReferenceBase): + """Reference to an asset via its ARM resource ID. + + All required parameters must be populated in order to send to Azure. + + :param reference_type: Required. Constant filled by server. + :type reference_type: str + :param asset_id: Required. ARM resource ID of the asset. 
+ :type asset_id: str + """ + + _validation = { + 'reference_type': {'required': True}, + 'asset_id': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'reference_type': {'key': 'referenceType', 'type': 'str'}, + 'asset_id': {'key': 'assetId', 'type': 'str'}, + } + + def __init__(self, *, asset_id: str, **kwargs) -> None: + super(IdAssetReference, self).__init__(**kwargs) + self.asset_id = asset_id + self.reference_type = 'Id' + + +class Identity(Model): + """Identity for the resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar principal_id: The principal ID of resource identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of resource. + :vartype tenant_id: str + :param type: The identity type. Possible values include: 'SystemAssigned', + 'SystemAssigned,UserAssigned', 'UserAssigned', 'None' + :type type: str or + ~azure.mgmt.machinelearningservices.models.ResourceIdentityType + :param user_assigned_identities: The user assigned identities associated + with the resource. + :type user_assigned_identities: dict[str, + ~azure.mgmt.machinelearningservices.models.UserAssignedIdentity] + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'ResourceIdentityType'}, + 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'}, + } + + def __init__(self, *, type=None, user_assigned_identities=None, **kwargs) -> None: + super(Identity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + self.type = type + self.user_assigned_identities = user_assigned_identities + + +class IdentityForCmk(Model): + """Identity that will be used to access key vault for encryption at rest. 
+ + :param user_assigned_identity: The ArmId of the user assigned identity + that will be used to access the customer managed key vault + :type user_assigned_identity: str + """ + + _attribute_map = { + 'user_assigned_identity': {'key': 'userAssignedIdentity', 'type': 'str'}, + } + + def __init__(self, *, user_assigned_identity: str=None, **kwargs) -> None: + super(IdentityForCmk, self).__init__(**kwargs) + self.user_assigned_identity = user_assigned_identity + + +class InferenceContainerProperties(Model): + """InferenceContainerProperties. + + :param liveness_route: The route to check the liveness of the inference + server container. + :type liveness_route: ~azure.mgmt.machinelearningservices.models.Route + :param readiness_route: The route to check the readiness of the inference + server container. + :type readiness_route: ~azure.mgmt.machinelearningservices.models.Route + :param scoring_route: The port to send the scoring requests to, within the + inference server container. + :type scoring_route: ~azure.mgmt.machinelearningservices.models.Route + """ + + _attribute_map = { + 'liveness_route': {'key': 'livenessRoute', 'type': 'Route'}, + 'readiness_route': {'key': 'readinessRoute', 'type': 'Route'}, + 'scoring_route': {'key': 'scoringRoute', 'type': 'Route'}, + } + + def __init__(self, *, liveness_route=None, readiness_route=None, scoring_route=None, **kwargs) -> None: + super(InferenceContainerProperties, self).__init__(**kwargs) + self.liveness_route = liveness_route + self.readiness_route = readiness_route + self.scoring_route = scoring_route + + +class InputDataBinding(Model): + """InputDataBinding. + + :param data_id: ARM resource ID of the registered dataVersion. + :type data_id: str + :param mode: Mechanism for accessing the data artifact. Possible values + include: 'Mount', 'Download', 'Upload' + :type mode: str or + ~azure.mgmt.machinelearningservices.models.DataBindingMode + :param path_on_compute: Location of data inside the container process. 
+ :type path_on_compute: str + """ + + _attribute_map = { + 'data_id': {'key': 'dataId', 'type': 'str'}, + 'mode': {'key': 'mode', 'type': 'str'}, + 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, + } + + def __init__(self, *, data_id: str=None, mode=None, path_on_compute: str=None, **kwargs) -> None: + super(InputDataBinding, self).__init__(**kwargs) + self.data_id = data_id + self.mode = mode + self.path_on_compute = path_on_compute + + +class JobBaseResource(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param properties: Required. Additional attributes of the entity. 
+ :type properties: ~azure.mgmt.machinelearningservices.models.JobBase + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'JobBase'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(JobBaseResource, self).__init__(**kwargs) + self.properties = properties + self.system_data = None + + +class JobEndpoint(Model): + """Job endpoint definition. + + :param endpoint: Url for endpoint. + :type endpoint: str + :param job_endpoint_type: Endpoint type. + :type job_endpoint_type: str + :param port: Port for endpoint. + :type port: int + :param properties: Additional properties to set on the endpoint. + :type properties: dict[str, str] + """ + + _attribute_map = { + 'endpoint': {'key': 'endpoint', 'type': 'str'}, + 'job_endpoint_type': {'key': 'jobEndpointType', 'type': 'str'}, + 'port': {'key': 'port', 'type': 'int'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + } + + def __init__(self, *, endpoint: str=None, job_endpoint_type: str=None, port: int=None, properties=None, **kwargs) -> None: + super(JobEndpoint, self).__init__(**kwargs) + self.endpoint = endpoint + self.job_endpoint_type = job_endpoint_type + self.port = port + self.properties = properties + + +class JobOutput(Model): + """Job output definition container information on where to find job + output/logs. + + Variables are only populated by the server, and will be ignored when + sending a request. 
+ + :ivar datastore_id: ARM ID of the datastore where the job logs and + artifacts are stored, or null for the default container ("azureml") in the + workspace's storage account. + :vartype datastore_id: str + :ivar path: Path within the datastore to the job logs and artifacts. + :vartype path: str + """ + + _validation = { + 'datastore_id': {'readonly': True}, + 'path': {'readonly': True}, + } + + _attribute_map = { + 'datastore_id': {'key': 'datastoreId', 'type': 'str'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(JobOutput, self).__init__(**kwargs) + self.datastore_id = None + self.path = None + + +class OnlineDeployment(Model): + """OnlineDeployment. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: K8sOnlineDeployment, ManagedOnlineDeployment + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param app_insights_enabled: If true, enables Application Insights + logging. + :type app_insights_enabled: bool + :param code_configuration: Code configuration for the endpoint deployment. + :type code_configuration: + ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :param description: Description of the endpoint deployment. + :type description: str + :param environment_id: ARM resource ID of the environment specification + for the endpoint deployment. + :type environment_id: str + :param environment_variables: Environment variables configuration for the + deployment. + :type environment_variables: dict[str, str] + :param liveness_probe: Deployment container liveness/readiness probe + configuration. + :type liveness_probe: + ~azure.mgmt.machinelearningservices.models.ProbeSettings + :param model: Reference to the model asset for the endpoint deployment. 
+ :type model: ~azure.mgmt.machinelearningservices.models.AssetReferenceBase + :param properties: Property dictionary. Properties can be added, but not + removed or altered. + :type properties: dict[str, str] + :ivar provisioning_state: Provisioning state for the endpoint deployment. + Possible values include: 'Creating', 'Deleting', 'Scaling', 'Updating', + 'Succeeded', 'Failed', 'Canceled' + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState + :param request_settings: Online deployment scoring requests configuration. + :type request_settings: + ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :param scale_settings: Online deployment scaling configuration. + :type scale_settings: + ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + :param endpoint_compute_type: Required. Constant filled by server. + :type endpoint_compute_type: str + """ + + _validation = { + 'provisioning_state': {'readonly': True}, + 'endpoint_compute_type': {'required': True}, + } + + _attribute_map = { + 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'}, + 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, + 'description': {'key': 'description', 'type': 'str'}, + 'environment_id': {'key': 'environmentId', 'type': 'str'}, + 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, + 'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'}, + 'model': {'key': 'model', 'type': 'AssetReferenceBase'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'}, + 'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'}, + 'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'}, + } + + _subtype_map = { + 'endpoint_compute_type': {'K8S': 
'K8sOnlineDeployment', 'Managed': 'ManagedOnlineDeployment'} + } + + def __init__(self, *, app_insights_enabled: bool=None, code_configuration=None, description: str=None, environment_id: str=None, environment_variables=None, liveness_probe=None, model=None, properties=None, request_settings=None, scale_settings=None, **kwargs) -> None: + super(OnlineDeployment, self).__init__(**kwargs) + self.app_insights_enabled = app_insights_enabled + self.code_configuration = code_configuration + self.description = description + self.environment_id = environment_id + self.environment_variables = environment_variables + self.liveness_probe = liveness_probe + self.model = model + self.properties = properties + self.provisioning_state = None + self.request_settings = request_settings + self.scale_settings = scale_settings + self.endpoint_compute_type = None + + +class K8sOnlineDeployment(OnlineDeployment): + """K8sOnlineDeployment. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param app_insights_enabled: If true, enables Application Insights + logging. + :type app_insights_enabled: bool + :param code_configuration: Code configuration for the endpoint deployment. + :type code_configuration: + ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :param description: Description of the endpoint deployment. + :type description: str + :param environment_id: ARM resource ID of the environment specification + for the endpoint deployment. + :type environment_id: str + :param environment_variables: Environment variables configuration for the + deployment. + :type environment_variables: dict[str, str] + :param liveness_probe: Deployment container liveness/readiness probe + configuration. + :type liveness_probe: + ~azure.mgmt.machinelearningservices.models.ProbeSettings + :param model: Reference to the model asset for the endpoint deployment. 
+ :type model: ~azure.mgmt.machinelearningservices.models.AssetReferenceBase + :param properties: Property dictionary. Properties can be added, but not + removed or altered. + :type properties: dict[str, str] + :ivar provisioning_state: Provisioning state for the endpoint deployment. + Possible values include: 'Creating', 'Deleting', 'Scaling', 'Updating', + 'Succeeded', 'Failed', 'Canceled' + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState + :param request_settings: Online deployment scoring requests configuration. + :type request_settings: + ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :param scale_settings: Online deployment scaling configuration. + :type scale_settings: + ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + :param endpoint_compute_type: Required. Constant filled by server. + :type endpoint_compute_type: str + :param container_resource_requirements: Resource requirements for each + container instance within an online deployment. 
+ :type container_resource_requirements: + ~azure.mgmt.machinelearningservices.models.ContainerResourceRequirements + """ + + _validation = { + 'provisioning_state': {'readonly': True}, + 'endpoint_compute_type': {'required': True}, + } + + _attribute_map = { + 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'}, + 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, + 'description': {'key': 'description', 'type': 'str'}, + 'environment_id': {'key': 'environmentId', 'type': 'str'}, + 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, + 'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'}, + 'model': {'key': 'model', 'type': 'AssetReferenceBase'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'}, + 'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'}, + 'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'}, + 'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'}, + } + + def __init__(self, *, app_insights_enabled: bool=None, code_configuration=None, description: str=None, environment_id: str=None, environment_variables=None, liveness_probe=None, model=None, properties=None, request_settings=None, scale_settings=None, container_resource_requirements=None, **kwargs) -> None: + super(K8sOnlineDeployment, self).__init__(app_insights_enabled=app_insights_enabled, code_configuration=code_configuration, description=description, environment_id=environment_id, environment_variables=environment_variables, liveness_probe=liveness_probe, model=model, properties=properties, request_settings=request_settings, scale_settings=scale_settings, **kwargs) + self.container_resource_requirements = container_resource_requirements + 
self.endpoint_compute_type = 'K8S' + + +class KeyVaultProperties(Model): + """KeyVaultProperties. + + All required parameters must be populated in order to send to Azure. + + :param key_vault_arm_id: Required. The ArmId of the keyVault where the + customer owned encryption key is present. + :type key_vault_arm_id: str + :param key_identifier: Required. Key vault uri to access the encryption + key. + :type key_identifier: str + :param identity_client_id: For future use - The client id of the identity + which will be used to access key vault. + :type identity_client_id: str + """ + + _validation = { + 'key_vault_arm_id': {'required': True}, + 'key_identifier': {'required': True}, + } + + _attribute_map = { + 'key_vault_arm_id': {'key': 'keyVaultArmId', 'type': 'str'}, + 'key_identifier': {'key': 'keyIdentifier', 'type': 'str'}, + 'identity_client_id': {'key': 'identityClientId', 'type': 'str'}, + } + + def __init__(self, *, key_vault_arm_id: str, key_identifier: str, identity_client_id: str=None, **kwargs) -> None: + super(KeyVaultProperties, self).__init__(**kwargs) + self.key_vault_arm_id = key_vault_arm_id + self.key_identifier = key_identifier + self.identity_client_id = identity_client_id + + +class LabelCategory(Model): + """Label category definition. + + :param allow_multi_select: Indicates whether it is allowed to select + multiple classes in this category. + :type allow_multi_select: bool + :param classes: Dictionary of label classes in this category. + :type classes: dict[str, + ~azure.mgmt.machinelearningservices.models.LabelClass] + :param display_name: Display name of the label category. 
+ :type display_name: str + """ + + _attribute_map = { + 'allow_multi_select': {'key': 'allowMultiSelect', 'type': 'bool'}, + 'classes': {'key': 'classes', 'type': '{LabelClass}'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + } + + def __init__(self, *, allow_multi_select: bool=None, classes=None, display_name: str=None, **kwargs) -> None: + super(LabelCategory, self).__init__(**kwargs) + self.allow_multi_select = allow_multi_select + self.classes = classes + self.display_name = display_name + + +class LabelClass(Model): + """Label class definition. + + :param display_name: Display name of the label class. + :type display_name: str + :param subclasses: Dictionary of subclasses of the label class. + :type subclasses: dict[str, + ~azure.mgmt.machinelearningservices.models.LabelClass] + """ + + _attribute_map = { + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'subclasses': {'key': 'subclasses', 'type': '{LabelClass}'}, + } + + def __init__(self, *, display_name: str=None, subclasses=None, **kwargs) -> None: + super(LabelClass, self).__init__(**kwargs) + self.display_name = display_name + self.subclasses = subclasses + + +class LabelingDatasetConfiguration(Model): + """Labeling dataset configuration definition. + + :param asset_name: Name of the data asset to perform labeling. + :type asset_name: str + :param dataset_version: AML dataset version. + :type dataset_version: str + :param incremental_dataset_refresh_enabled: Indicates whether to enable + incremental dataset refresh. 
+ :type incremental_dataset_refresh_enabled: bool + """ + + _attribute_map = { + 'asset_name': {'key': 'assetName', 'type': 'str'}, + 'dataset_version': {'key': 'datasetVersion', 'type': 'str'}, + 'incremental_dataset_refresh_enabled': {'key': 'incrementalDatasetRefreshEnabled', 'type': 'bool'}, + } + + def __init__(self, *, asset_name: str=None, dataset_version: str=None, incremental_dataset_refresh_enabled: bool=None, **kwargs) -> None: + super(LabelingDatasetConfiguration, self).__init__(**kwargs) + self.asset_name = asset_name + self.dataset_version = dataset_version + self.incremental_dataset_refresh_enabled = incremental_dataset_refresh_enabled + + +class LabelingJob(Model): + """Labeling job definition. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar created_time_utc: Created time of the job in UTC timezone. + :vartype created_time_utc: datetime + :param dataset_configuration: Configuration of dataset used in the job. + :type dataset_configuration: + ~azure.mgmt.machinelearningservices.models.LabelingDatasetConfiguration + :param description: The asset description text. + :type description: str + :ivar interaction_endpoints: List of JobEndpoints. + For local jobs, a job endpoint will have an endpoint value of + FileStreamObject. + :vartype interaction_endpoints: dict[str, + ~azure.mgmt.machinelearningservices.models.JobEndpoint] + :param job_instructions: Labeling instructions of the job. + :type job_instructions: + ~azure.mgmt.machinelearningservices.models.LabelingJobInstructions + :param job_type: Required. Specifies the type of job. This field should + always be set to "Labeling". Possible values include: 'Command', 'Sweep', + 'Labeling' + :type job_type: str or ~azure.mgmt.machinelearningservices.models.JobType + :param label_categories: Label categories of the job. 
+ :type label_categories: dict[str, + ~azure.mgmt.machinelearningservices.models.LabelCategory] + :param labeling_job_media_properties: Media type specific properties in + the job. + :type labeling_job_media_properties: + ~azure.mgmt.machinelearningservices.models.LabelingJobMediaProperties + :param ml_assist_configuration: Configuration of MLAssist feature in the + job. + :type ml_assist_configuration: + ~azure.mgmt.machinelearningservices.models.MLAssistConfiguration + :ivar progress_metrics: Progress metrics of the job. + :vartype progress_metrics: + ~azure.mgmt.machinelearningservices.models.ProgressMetrics + :ivar project_id: Internal id of the job(Previously called project). + :vartype project_id: str + :param properties: The asset property dictionary. + :type properties: dict[str, str] + :ivar provisioning_state: Specifies the labeling job provisioning state. + Possible values include: 'Succeeded', 'Failed', 'Canceled', 'InProgress' + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.JobProvisioningState + :ivar status: Status of the job. Possible values include: 'NotStarted', + 'Starting', 'Provisioning', 'Preparing', 'Queued', 'Running', + 'Finalizing', 'CancelRequested', 'Completed', 'Failed', 'Canceled', + 'NotResponding', 'Paused', 'Unknown' + :vartype status: str or + ~azure.mgmt.machinelearningservices.models.JobStatus + :ivar status_messages: Status messages of the job. + :vartype status_messages: + list[~azure.mgmt.machinelearningservices.models.StatusMessage] + :param tags: Tag dictionary. Tags can be added, removed, and updated. 
+ :type tags: dict[str, str] + """ + + _validation = { + 'created_time_utc': {'readonly': True}, + 'interaction_endpoints': {'readonly': True}, + 'job_type': {'required': True}, + 'progress_metrics': {'readonly': True}, + 'project_id': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'status': {'readonly': True}, + 'status_messages': {'readonly': True}, + } + + _attribute_map = { + 'created_time_utc': {'key': 'createdTimeUtc', 'type': 'iso-8601'}, + 'dataset_configuration': {'key': 'datasetConfiguration', 'type': 'LabelingDatasetConfiguration'}, + 'description': {'key': 'description', 'type': 'str'}, + 'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'}, + 'job_instructions': {'key': 'jobInstructions', 'type': 'LabelingJobInstructions'}, + 'job_type': {'key': 'jobType', 'type': 'str'}, + 'label_categories': {'key': 'labelCategories', 'type': '{LabelCategory}'}, + 'labeling_job_media_properties': {'key': 'labelingJobMediaProperties', 'type': 'LabelingJobMediaProperties'}, + 'ml_assist_configuration': {'key': 'mlAssistConfiguration', 'type': 'MLAssistConfiguration'}, + 'progress_metrics': {'key': 'progressMetrics', 'type': 'ProgressMetrics'}, + 'project_id': {'key': 'projectId', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'status_messages': {'key': 'statusMessages', 'type': '[StatusMessage]'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, *, job_type, dataset_configuration=None, description: str=None, job_instructions=None, label_categories=None, labeling_job_media_properties=None, ml_assist_configuration=None, properties=None, tags=None, **kwargs) -> None: + super(LabelingJob, self).__init__(**kwargs) + self.created_time_utc = None + self.dataset_configuration = dataset_configuration + self.description = description + self.interaction_endpoints = None + 
self.job_instructions = job_instructions + self.job_type = job_type + self.label_categories = label_categories + self.labeling_job_media_properties = labeling_job_media_properties + self.ml_assist_configuration = ml_assist_configuration + self.progress_metrics = None + self.project_id = None + self.properties = properties + self.provisioning_state = None + self.status = None + self.status_messages = None + self.tags = tags + + +class LabelingJobMediaProperties(Model): + """Properties of a labeling job. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: LabelingJobImageProperties, LabelingJobTextProperties + + All required parameters must be populated in order to send to Azure. + + :param media_type: Required. Constant filled by server. + :type media_type: str + """ + + _validation = { + 'media_type': {'required': True}, + } + + _attribute_map = { + 'media_type': {'key': 'mediaType', 'type': 'str'}, + } + + _subtype_map = { + 'media_type': {'Image': 'LabelingJobImageProperties', 'Text': 'LabelingJobTextProperties'} + } + + def __init__(self, **kwargs) -> None: + super(LabelingJobMediaProperties, self).__init__(**kwargs) + self.media_type = None + + +class LabelingJobImageProperties(LabelingJobMediaProperties): + """Properties of a labeling job for image data. + + All required parameters must be populated in order to send to Azure. + + :param media_type: Required. Constant filled by server. + :type media_type: str + :param annotation_type: Annotation type of image labeling job. 
Possible + values include: 'Classification', 'BoundingBox', 'InstanceSegmentation' + :type annotation_type: str or + ~azure.mgmt.machinelearningservices.models.ImageAnnotationType + """ + + _validation = { + 'media_type': {'required': True}, + } + + _attribute_map = { + 'media_type': {'key': 'mediaType', 'type': 'str'}, + 'annotation_type': {'key': 'annotationType', 'type': 'str'}, + } + + def __init__(self, *, annotation_type=None, **kwargs) -> None: + super(LabelingJobImageProperties, self).__init__(**kwargs) + self.annotation_type = annotation_type + self.media_type = 'Image' + + +class LabelingJobInstructions(Model): + """Instructions for labeling job. + + :param uri: The link to a page with detailed labeling instructions for + labelers. + :type uri: str + """ + + _attribute_map = { + 'uri': {'key': 'uri', 'type': 'str'}, + } + + def __init__(self, *, uri: str=None, **kwargs) -> None: + super(LabelingJobInstructions, self).__init__(**kwargs) + self.uri = uri + + +class LabelingJobResource(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param properties: Required. Additional attributes of the entity. 
+ :type properties: ~azure.mgmt.machinelearningservices.models.LabelingJob + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'LabelingJob'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(LabelingJobResource, self).__init__(**kwargs) + self.properties = properties + self.system_data = None + + +class LabelingJobTextProperties(LabelingJobMediaProperties): + """Properties of a labeling job for text data. + + All required parameters must be populated in order to send to Azure. + + :param media_type: Required. Constant filled by server. + :type media_type: str + :param annotation_type: Annotation type of text labeling job. Possible + values include: 'Classification' + :type annotation_type: str or + ~azure.mgmt.machinelearningservices.models.TextAnnotationType + """ + + _validation = { + 'media_type': {'required': True}, + } + + _attribute_map = { + 'media_type': {'key': 'mediaType', 'type': 'str'}, + 'annotation_type': {'key': 'annotationType', 'type': 'str'}, + } + + def __init__(self, *, annotation_type=None, **kwargs) -> None: + super(LabelingJobTextProperties, self).__init__(**kwargs) + self.annotation_type = annotation_type + self.media_type = 'Text' + + +class LinkedInfo(Model): + """Information about a datastore origin, if linked. + + :param linked_id: Linked service ID. + :type linked_id: str + :param linked_resource_name: Linked service resource name. 
+ :type linked_resource_name: str + :param origin: Type of the linked service. Possible values include: + 'Synapse' + :type origin: str or ~azure.mgmt.machinelearningservices.models.OriginType + """ + + _attribute_map = { + 'linked_id': {'key': 'linkedId', 'type': 'str'}, + 'linked_resource_name': {'key': 'linkedResourceName', 'type': 'str'}, + 'origin': {'key': 'origin', 'type': 'str'}, + } + + def __init__(self, *, linked_id: str=None, linked_resource_name: str=None, origin=None, **kwargs) -> None: + super(LinkedInfo, self).__init__(**kwargs) + self.linked_id = linked_id + self.linked_resource_name = linked_resource_name + self.origin = origin + + +class ListNotebookKeysResult(Model): + """ListNotebookKeysResult. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar primary_access_key: + :vartype primary_access_key: str + :ivar secondary_access_key: + :vartype secondary_access_key: str + """ + + _validation = { + 'primary_access_key': {'readonly': True}, + 'secondary_access_key': {'readonly': True}, + } + + _attribute_map = { + 'primary_access_key': {'key': 'primaryAccessKey', 'type': 'str'}, + 'secondary_access_key': {'key': 'secondaryAccessKey', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(ListNotebookKeysResult, self).__init__(**kwargs) + self.primary_access_key = None + self.secondary_access_key = None + + +class ListStorageAccountKeysResult(Model): + """ListStorageAccountKeysResult. + + Variables are only populated by the server, and will be ignored when + sending a request. 
+ + :ivar user_storage_key: + :vartype user_storage_key: str + """ + + _validation = { + 'user_storage_key': {'readonly': True}, + } + + _attribute_map = { + 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(ListStorageAccountKeysResult, self).__init__(**kwargs) + self.user_storage_key = None + + +class ListWorkspaceKeysResult(Model): + """ListWorkspaceKeysResult. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar user_storage_key: + :vartype user_storage_key: str + :ivar user_storage_resource_id: + :vartype user_storage_resource_id: str + :ivar app_insights_instrumentation_key: + :vartype app_insights_instrumentation_key: str + :ivar container_registry_credentials: + :vartype container_registry_credentials: + ~azure.mgmt.machinelearningservices.models.RegistryListCredentialsResult + :ivar notebook_access_keys: + :vartype notebook_access_keys: + ~azure.mgmt.machinelearningservices.models.ListNotebookKeysResult + """ + + _validation = { + 'user_storage_key': {'readonly': True}, + 'user_storage_resource_id': {'readonly': True}, + 'app_insights_instrumentation_key': {'readonly': True}, + 'container_registry_credentials': {'readonly': True}, + 'notebook_access_keys': {'readonly': True}, + } + + _attribute_map = { + 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'}, + 'user_storage_resource_id': {'key': 'userStorageResourceId', 'type': 'str'}, + 'app_insights_instrumentation_key': {'key': 'appInsightsInstrumentationKey', 'type': 'str'}, + 'container_registry_credentials': {'key': 'containerRegistryCredentials', 'type': 'RegistryListCredentialsResult'}, + 'notebook_access_keys': {'key': 'notebookAccessKeys', 'type': 'ListNotebookKeysResult'}, + } + + def __init__(self, **kwargs) -> None: + super(ListWorkspaceKeysResult, self).__init__(**kwargs) + self.user_storage_key = None + self.user_storage_resource_id = None + 
self.app_insights_instrumentation_key = None + self.container_registry_credentials = None + self.notebook_access_keys = None + + +class ManagedIdentity(IdentityConfiguration): + """Managed identity configuration. + + All required parameters must be populated in order to send to Azure. + + :param identity_type: Required. Constant filled by server. + :type identity_type: str + :param client_id: Specifies a user-assigned identity by client ID. For + system-assigned, do not set this field. + :type client_id: str + :param object_id: Specifies a user-assigned identity by object ID. For + system-assigned, do not set this field. + :type object_id: str + :param resource_id: Specifies a user-assigned identity by ARM resource ID. + For system-assigned, do not set this field. + :type resource_id: str + """ + + _validation = { + 'identity_type': {'required': True}, + } + + _attribute_map = { + 'identity_type': {'key': 'identityType', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + 'object_id': {'key': 'objectId', 'type': 'str'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + } + + def __init__(self, *, client_id: str=None, object_id: str=None, resource_id: str=None, **kwargs) -> None: + super(ManagedIdentity, self).__init__(**kwargs) + self.client_id = client_id + self.object_id = object_id + self.resource_id = resource_id + self.identity_type = 'Managed' + + +class ManagedOnlineDeployment(OnlineDeployment): + """ManagedOnlineDeployment. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param app_insights_enabled: If true, enables Application Insights + logging. + :type app_insights_enabled: bool + :param code_configuration: Code configuration for the endpoint deployment. + :type code_configuration: + ~azure.mgmt.machinelearningservices.models.CodeConfiguration + :param description: Description of the endpoint deployment. 
+ :type description: str + :param environment_id: ARM resource ID of the environment specification + for the endpoint deployment. + :type environment_id: str + :param environment_variables: Environment variables configuration for the + deployment. + :type environment_variables: dict[str, str] + :param liveness_probe: Deployment container liveness/readiness probe + configuration. + :type liveness_probe: + ~azure.mgmt.machinelearningservices.models.ProbeSettings + :param model: Reference to the model asset for the endpoint deployment. + :type model: ~azure.mgmt.machinelearningservices.models.AssetReferenceBase + :param properties: Property dictionary. Properties can be added, but not + removed or altered. + :type properties: dict[str, str] + :ivar provisioning_state: Provisioning state for the endpoint deployment. + Possible values include: 'Creating', 'Deleting', 'Scaling', 'Updating', + 'Succeeded', 'Failed', 'Canceled' + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState + :param request_settings: Online deployment scoring requests configuration. + :type request_settings: + ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :param scale_settings: Online deployment scaling configuration. + :type scale_settings: + ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + :param endpoint_compute_type: Required. Constant filled by server. + :type endpoint_compute_type: str + :param instance_type: Compute instance type. + :type instance_type: str + :param readiness_probe: Deployment container liveness/readiness probe + configuration. 
+ :type readiness_probe: + ~azure.mgmt.machinelearningservices.models.ProbeSettings + """ + + _validation = { + 'provisioning_state': {'readonly': True}, + 'endpoint_compute_type': {'required': True}, + } + + _attribute_map = { + 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'}, + 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, + 'description': {'key': 'description', 'type': 'str'}, + 'environment_id': {'key': 'environmentId', 'type': 'str'}, + 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, + 'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'}, + 'model': {'key': 'model', 'type': 'AssetReferenceBase'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'}, + 'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'}, + 'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'}, + 'instance_type': {'key': 'instanceType', 'type': 'str'}, + 'readiness_probe': {'key': 'readinessProbe', 'type': 'ProbeSettings'}, + } + + def __init__(self, *, app_insights_enabled: bool=None, code_configuration=None, description: str=None, environment_id: str=None, environment_variables=None, liveness_probe=None, model=None, properties=None, request_settings=None, scale_settings=None, instance_type: str=None, readiness_probe=None, **kwargs) -> None: + super(ManagedOnlineDeployment, self).__init__(app_insights_enabled=app_insights_enabled, code_configuration=code_configuration, description=description, environment_id=environment_id, environment_variables=environment_variables, liveness_probe=liveness_probe, model=model, properties=properties, request_settings=request_settings, scale_settings=scale_settings, **kwargs) + self.instance_type = instance_type + self.readiness_probe = readiness_probe + 
self.endpoint_compute_type = 'Managed' + + +class ManualScaleSettings(OnlineScaleSettings): + """ManualScaleSettings. + + All required parameters must be populated in order to send to Azure. + + :param max_instances: Maximum number of instances for this deployment. + :type max_instances: int + :param min_instances: Minimum number of instances for this deployment. + :type min_instances: int + :param scale_type: Required. Constant filled by server. + :type scale_type: str + :param instance_count: Fixed number of instances for this deployment. + :type instance_count: int + """ + + _validation = { + 'scale_type': {'required': True}, + } + + _attribute_map = { + 'max_instances': {'key': 'maxInstances', 'type': 'int'}, + 'min_instances': {'key': 'minInstances', 'type': 'int'}, + 'scale_type': {'key': 'scaleType', 'type': 'str'}, + 'instance_count': {'key': 'instanceCount', 'type': 'int'}, + } + + def __init__(self, *, max_instances: int=None, min_instances: int=None, instance_count: int=None, **kwargs) -> None: + super(ManualScaleSettings, self).__init__(max_instances=max_instances, min_instances=min_instances, **kwargs) + self.instance_count = instance_count + self.scale_type = 'Manual' + + +class MedianStoppingPolicy(EarlyTerminationPolicy): + """Defines an early termination policy based on running averages of the + primary metric of all runs. + + All required parameters must be populated in order to send to Azure. + + :param delay_evaluation: Number of intervals by which to delay the first + evaluation. + :type delay_evaluation: int + :param evaluation_interval: Interval (number of runs) between policy + evaluations. + :type evaluation_interval: int + :param policy_type: Required. Constant filled by server. 
+ :type policy_type: str + """ + + _validation = { + 'policy_type': {'required': True}, + } + + _attribute_map = { + 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'}, + 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'}, + 'policy_type': {'key': 'policyType', 'type': 'str'}, + } + + def __init__(self, *, delay_evaluation: int=None, evaluation_interval: int=None, **kwargs) -> None: + super(MedianStoppingPolicy, self).__init__(delay_evaluation=delay_evaluation, evaluation_interval=evaluation_interval, **kwargs) + self.policy_type = 'MedianStopping' + + +class MLAssistConfiguration(Model): + """Labeling MLAssist configuration definition. + + :param inferencing_compute_binding: AML compute binding used in + inferencing. + :type inferencing_compute_binding: + ~azure.mgmt.machinelearningservices.models.ComputeConfiguration + :param ml_assist_enabled: Indicates whether MLAssist feature is enabled. + :type ml_assist_enabled: bool + :param training_compute_binding: AML compute binding used in training. + :type training_compute_binding: + ~azure.mgmt.machinelearningservices.models.ComputeConfiguration + """ + + _attribute_map = { + 'inferencing_compute_binding': {'key': 'inferencingComputeBinding', 'type': 'ComputeConfiguration'}, + 'ml_assist_enabled': {'key': 'mlAssistEnabled', 'type': 'bool'}, + 'training_compute_binding': {'key': 'trainingComputeBinding', 'type': 'ComputeConfiguration'}, + } + + def __init__(self, *, inferencing_compute_binding=None, ml_assist_enabled: bool=None, training_compute_binding=None, **kwargs) -> None: + super(MLAssistConfiguration, self).__init__(**kwargs) + self.inferencing_compute_binding = inferencing_compute_binding + self.ml_assist_enabled = ml_assist_enabled + self.training_compute_binding = training_compute_binding + + +class ModelContainer(Model): + """ModelContainer. + + :param description: The asset description text. + :type description: str + :param properties: The asset property dictionary. 
+ :type properties: dict[str, str] + :param tags: Tag dictionary. Tags can be added, removed, and updated. + :type tags: dict[str, str] + """ + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, *, description: str=None, properties=None, tags=None, **kwargs) -> None: + super(ModelContainer, self).__init__(**kwargs) + self.description = description + self.properties = properties + self.tags = tags + + +class ModelContainerResource(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param properties: Required. Additional attributes of the entity. 
+ :type properties: + ~azure.mgmt.machinelearningservices.models.ModelContainer + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'ModelContainer'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(ModelContainerResource, self).__init__(**kwargs) + self.properties = properties + self.system_data = None + + +class ModelVersion(Model): + """Model asset version details. + + All required parameters must be populated in order to send to Azure. + + :param datastore_id: ARM resource ID of the datastore where the asset is + located. + :type datastore_id: str + :param description: The asset description text. + :type description: str + :param flavors: Mapping of model flavors to their properties. + :type flavors: dict[str, + ~azure.mgmt.machinelearningservices.models.FlavorData] + :param is_anonymous: If the name version are system generated (anonymous + registration). + :type is_anonymous: bool + :param path: Required. The path of the file/directory in the datastore. + :type path: str + :param properties: The asset property dictionary. + :type properties: dict[str, str] + :param tags: Tag dictionary. Tags can be added, removed, and updated. 
+ :type tags: dict[str, str] + """ + + _validation = { + 'path': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'datastore_id': {'key': 'datastoreId', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'flavors': {'key': 'flavors', 'type': '{FlavorData}'}, + 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, + 'path': {'key': 'path', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, *, path: str, datastore_id: str=None, description: str=None, flavors=None, is_anonymous: bool=None, properties=None, tags=None, **kwargs) -> None: + super(ModelVersion, self).__init__(**kwargs) + self.datastore_id = datastore_id + self.description = description + self.flavors = flavors + self.is_anonymous = is_anonymous + self.path = path + self.properties = properties + self.tags = tags + + +class ModelVersionResource(Resource): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param properties: Required. Additional attributes of the entity. 
+ :type properties: ~azure.mgmt.machinelearningservices.models.ModelVersion + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'ModelVersion'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(ModelVersionResource, self).__init__(**kwargs) + self.properties = properties + self.system_data = None + + +class Mpi(DistributionConfiguration): + """MPI distribution configuration. + + All required parameters must be populated in order to send to Azure. + + :param distribution_type: Required. Constant filled by server. + :type distribution_type: str + :param process_count_per_instance: Number of processes per MPI node. + :type process_count_per_instance: int + """ + + _validation = { + 'distribution_type': {'required': True}, + } + + _attribute_map = { + 'distribution_type': {'key': 'distributionType', 'type': 'str'}, + 'process_count_per_instance': {'key': 'processCountPerInstance', 'type': 'int'}, + } + + def __init__(self, *, process_count_per_instance: int=None, **kwargs) -> None: + super(Mpi, self).__init__(**kwargs) + self.process_count_per_instance = process_count_per_instance + self.distribution_type = 'Mpi' + + +class NodeStateCounts(Model): + """Counts of various compute node states on the amlCompute. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar idle_node_count: Idle node count. Number of compute nodes in idle + state. 
+ :vartype idle_node_count: int + :ivar running_node_count: Running node count. Number of compute nodes + which are running jobs. + :vartype running_node_count: int + :ivar preparing_node_count: Preparing node count. Number of compute nodes + which are being prepared. + :vartype preparing_node_count: int + :ivar unusable_node_count: Unusable node count. Number of compute nodes + which are in unusable state. + :vartype unusable_node_count: int + :ivar leaving_node_count: Leaving node count. Number of compute nodes + which are leaving the amlCompute. + :vartype leaving_node_count: int + :ivar preempted_node_count: Preempted node count. Number of compute nodes + which are in preempted state. + :vartype preempted_node_count: int + """ + + _validation = { + 'idle_node_count': {'readonly': True}, + 'running_node_count': {'readonly': True}, + 'preparing_node_count': {'readonly': True}, + 'unusable_node_count': {'readonly': True}, + 'leaving_node_count': {'readonly': True}, + 'preempted_node_count': {'readonly': True}, + } + + _attribute_map = { + 'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'}, + 'running_node_count': {'key': 'runningNodeCount', 'type': 'int'}, + 'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'}, + 'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'}, + 'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'}, + 'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'}, + } + + def __init__(self, **kwargs) -> None: + super(NodeStateCounts, self).__init__(**kwargs) + self.idle_node_count = None + self.running_node_count = None + self.preparing_node_count = None + self.unusable_node_count = None + self.leaving_node_count = None + self.preempted_node_count = None + + +class NoneDatastoreCredentials(DatastoreCredentials): + """Empty/none datastore credentials. + + All required parameters must be populated in order to send to Azure. + + :param credentials_type: Required. Constant filled by server. 
+ :type credentials_type: str + :param secrets: Empty/none datastore secret. + :type secrets: + ~azure.mgmt.machinelearningservices.models.NoneDatastoreSecrets + """ + + _validation = { + 'credentials_type': {'required': True}, + } + + _attribute_map = { + 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, + 'secrets': {'key': 'secrets', 'type': 'NoneDatastoreSecrets'}, + } + + def __init__(self, *, secrets=None, **kwargs) -> None: + super(NoneDatastoreCredentials, self).__init__(**kwargs) + self.secrets = secrets + self.credentials_type = 'None' + + +class NoneDatastoreSecrets(DatastoreSecrets): + """Empty/none datastore secret. + + All required parameters must be populated in order to send to Azure. + + :param secrets_type: Required. Constant filled by server. + :type secrets_type: str + """ + + _validation = { + 'secrets_type': {'required': True}, + } + + _attribute_map = { + 'secrets_type': {'key': 'secretsType', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(NoneDatastoreSecrets, self).__init__(**kwargs) + self.secrets_type = 'None' + + +class NotebookAccessTokenResult(Model): + """NotebookAccessTokenResult. + + Variables are only populated by the server, and will be ignored when + sending a request. 
+ + :ivar notebook_resource_id: + :vartype notebook_resource_id: str + :ivar host_name: + :vartype host_name: str + :ivar public_dns: + :vartype public_dns: str + :ivar access_token: + :vartype access_token: str + :ivar token_type: + :vartype token_type: str + :ivar expires_in: + :vartype expires_in: int + :ivar refresh_token: + :vartype refresh_token: str + :ivar scope: + :vartype scope: str + """ + + _validation = { + 'notebook_resource_id': {'readonly': True}, + 'host_name': {'readonly': True}, + 'public_dns': {'readonly': True}, + 'access_token': {'readonly': True}, + 'token_type': {'readonly': True}, + 'expires_in': {'readonly': True}, + 'refresh_token': {'readonly': True}, + 'scope': {'readonly': True}, + } + + _attribute_map = { + 'notebook_resource_id': {'key': 'notebookResourceId', 'type': 'str'}, + 'host_name': {'key': 'hostName', 'type': 'str'}, + 'public_dns': {'key': 'publicDns', 'type': 'str'}, + 'access_token': {'key': 'accessToken', 'type': 'str'}, + 'token_type': {'key': 'tokenType', 'type': 'str'}, + 'expires_in': {'key': 'expiresIn', 'type': 'int'}, + 'refresh_token': {'key': 'refreshToken', 'type': 'str'}, + 'scope': {'key': 'scope', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(NotebookAccessTokenResult, self).__init__(**kwargs) + self.notebook_resource_id = None + self.host_name = None + self.public_dns = None + self.access_token = None + self.token_type = None + self.expires_in = None + self.refresh_token = None + self.scope = None + + +class NotebookPreparationError(Model): + """NotebookPreparationError. 
+ + :param error_message: + :type error_message: str + :param status_code: + :type status_code: int + """ + + _attribute_map = { + 'error_message': {'key': 'errorMessage', 'type': 'str'}, + 'status_code': {'key': 'statusCode', 'type': 'int'}, + } + + def __init__(self, *, error_message: str=None, status_code: int=None, **kwargs) -> None: + super(NotebookPreparationError, self).__init__(**kwargs) + self.error_message = error_message + self.status_code = status_code + + +class NotebookResourceInfo(Model): + """NotebookResourceInfo. + + :param fqdn: + :type fqdn: str + :param resource_id: the data plane resourceId that used to initialize + notebook component + :type resource_id: str + :param notebook_preparation_error: The error that occurs when preparing + notebook. + :type notebook_preparation_error: + ~azure.mgmt.machinelearningservices.models.NotebookPreparationError + """ + + _attribute_map = { + 'fqdn': {'key': 'fqdn', 'type': 'str'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'notebook_preparation_error': {'key': 'notebookPreparationError', 'type': 'NotebookPreparationError'}, + } + + def __init__(self, *, fqdn: str=None, resource_id: str=None, notebook_preparation_error=None, **kwargs) -> None: + super(NotebookResourceInfo, self).__init__(**kwargs) + self.fqdn = fqdn + self.resource_id = resource_id + self.notebook_preparation_error = notebook_preparation_error + + +class Objective(Model): + """Optimization objective. + + All required parameters must be populated in order to send to Azure. + + :param goal: Required. Defines supported metric goals for hyperparameter + tuning. Possible values include: 'Minimize', 'Maximize' + :type goal: str or ~azure.mgmt.machinelearningservices.models.Goal + :param primary_metric: Required. Name of the metric to optimize. 
+ :type primary_metric: str + """ + + _validation = { + 'goal': {'required': True}, + 'primary_metric': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + } + + _attribute_map = { + 'goal': {'key': 'goal', 'type': 'str'}, + 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, + } + + def __init__(self, *, goal, primary_metric: str, **kwargs) -> None: + super(Objective, self).__init__(**kwargs) + self.goal = goal + self.primary_metric = primary_metric + + +class OnlineDeploymentTrackedResource(TrackedResource): + """OnlineDeploymentTrackedResource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param tags: Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives + :type location: str + :param identity: Service identity associated with a resource. + :type identity: + ~azure.mgmt.machinelearningservices.models.ResourceIdentity + :param kind: Metadata used by portal/tooling/etc to render different UX + experiences for resources of the same type. + :type kind: str + :param properties: Required. Additional attributes of the entity. 
+ :type properties: + ~azure.mgmt.machinelearningservices.models.OnlineDeployment + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'OnlineDeployment'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, *, location: str, properties, tags=None, identity=None, kind: str=None, **kwargs) -> None: + super(OnlineDeploymentTrackedResource, self).__init__(tags=tags, location=location, **kwargs) + self.identity = identity + self.kind = kind + self.properties = properties + self.system_data = None + + +class OnlineEndpoint(Model): + """Online endpoint configuration. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :param auth_mode: Required. Inference endpoint authentication mode type. + Possible values include: 'AMLToken', 'Key', 'AADToken' + :type auth_mode: str or + ~azure.mgmt.machinelearningservices.models.EndpointAuthMode + :param description: Description of the inference endpoint. + :type description: str + :param keys: EndpointAuthKeys to set initially on an Endpoint. + This property will always be returned as null. AuthKey values must be + retrieved using the ListKeys API. 
+ :type keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys + :param properties: Property dictionary. Properties can be added, but not + removed or altered. + :type properties: dict[str, str] + :ivar provisioning_state: State of endpoint provisioning. Possible values + include: 'Creating', 'Deleting', 'Succeeded', 'Failed', 'Updating', + 'Canceled' + :vartype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.EndpointProvisioningState + :ivar scoring_uri: Endpoint URI. + :vartype scoring_uri: str + :ivar swagger_uri: Endpoint Swagger URI. + :vartype swagger_uri: str + :param target: ARM resource ID of the compute if it exists. + optional + :type target: str + :param traffic: Traffic rules on how the traffic will be routed across + deployments. + :type traffic: dict[str, int] + """ + + _validation = { + 'auth_mode': {'required': True}, + 'provisioning_state': {'readonly': True}, + 'scoring_uri': {'readonly': True}, + 'swagger_uri': {'readonly': True}, + } + + _attribute_map = { + 'auth_mode': {'key': 'authMode', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'keys': {'key': 'keys', 'type': 'EndpointAuthKeys'}, + 'properties': {'key': 'properties', 'type': '{str}'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'scoring_uri': {'key': 'scoringUri', 'type': 'str'}, + 'swagger_uri': {'key': 'swaggerUri', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'traffic': {'key': 'traffic', 'type': '{int}'}, + } + + def __init__(self, *, auth_mode, description: str=None, keys=None, properties=None, target: str=None, traffic=None, **kwargs) -> None: + super(OnlineEndpoint, self).__init__(**kwargs) + self.auth_mode = auth_mode + self.description = description + self.keys = keys + self.properties = properties + self.provisioning_state = None + self.scoring_uri = None + self.swagger_uri = None + self.target = target + self.traffic = traffic + + +class 
OnlineEndpointTrackedResource(TrackedResource): + """OnlineEndpointTrackedResource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param tags: Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives + :type location: str + :param identity: Service identity associated with a resource. + :type identity: + ~azure.mgmt.machinelearningservices.models.ResourceIdentity + :param kind: Metadata used by portal/tooling/etc to render different UX + experiences for resources of the same type. + :type kind: str + :param properties: Required. Additional attributes of the entity. 
+ :type properties: + ~azure.mgmt.machinelearningservices.models.OnlineEndpoint + :ivar system_data: System data associated with resource provider + :vartype system_data: + ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + 'properties': {'required': True}, + 'system_data': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'OnlineEndpoint'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, *, location: str, properties, tags=None, identity=None, kind: str=None, **kwargs) -> None: + super(OnlineEndpointTrackedResource, self).__init__(tags=tags, location=location, **kwargs) + self.identity = identity + self.kind = kind + self.properties = properties + self.system_data = None + + +class OnlineRequestSettings(Model): + """Online deployment scoring requests configuration. + + :param max_concurrent_requests_per_instance: The number of requests + allowed to queue at once for this deployment. + :type max_concurrent_requests_per_instance: int + :param max_queue_wait: The maximum queue wait time in ISO 8601 format. + Supports millisecond precision. + :type max_queue_wait: timedelta + :param request_timeout: The request timeout in ISO 8601 format. Supports + millisecond precision. 
+ :type request_timeout: timedelta + """ + + _attribute_map = { + 'max_concurrent_requests_per_instance': {'key': 'maxConcurrentRequestsPerInstance', 'type': 'int'}, + 'max_queue_wait': {'key': 'maxQueueWait', 'type': 'duration'}, + 'request_timeout': {'key': 'requestTimeout', 'type': 'duration'}, + } + + def __init__(self, *, max_concurrent_requests_per_instance: int=None, max_queue_wait=None, request_timeout=None, **kwargs) -> None: + super(OnlineRequestSettings, self).__init__(**kwargs) + self.max_concurrent_requests_per_instance = max_concurrent_requests_per_instance + self.max_queue_wait = max_queue_wait + self.request_timeout = request_timeout + + +class Operation(Model): + """Azure Machine Learning workspace REST API operation. + + :param name: Operation name: {provider}/{resource}/{operation} + :type name: str + :param display: Display name of operation + :type display: ~azure.mgmt.machinelearningservices.models.OperationDisplay + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display': {'key': 'display', 'type': 'OperationDisplay'}, + } + + def __init__(self, *, name: str=None, display=None, **kwargs) -> None: + super(Operation, self).__init__(**kwargs) + self.name = name + self.display = display + + +class OperationDisplay(Model): + """Display name of operation. + + :param provider: The resource provider name: + Microsoft.MachineLearningExperimentation + :type provider: str + :param resource: The resource on which the operation is performed. + :type resource: str + :param operation: The operation that users can perform. + :type operation: str + :param description: The description for the operation. 
+ :type description: str + """ + + _attribute_map = { + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__(self, *, provider: str=None, resource: str=None, operation: str=None, description: str=None, **kwargs) -> None: + super(OperationDisplay, self).__init__(**kwargs) + self.provider = provider + self.resource = resource + self.operation = operation + self.description = description + + +class OutputDataBinding(Model): + """OutputDataBinding. + + :param datastore_id: ARM resource ID of the datastore where the data + output will be stored. + :type datastore_id: str + :param mode: Mechanism for data movement to datastore. Possible values + include: 'Mount', 'Download', 'Upload' + :type mode: str or + ~azure.mgmt.machinelearningservices.models.DataBindingMode + :param path_on_compute: Location of data inside the container process. + :type path_on_compute: str + :param path_on_datastore: Path within the datastore to the data. + :type path_on_datastore: str + """ + + _attribute_map = { + 'datastore_id': {'key': 'datastoreId', 'type': 'str'}, + 'mode': {'key': 'mode', 'type': 'str'}, + 'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'}, + 'path_on_datastore': {'key': 'pathOnDatastore', 'type': 'str'}, + } + + def __init__(self, *, datastore_id: str=None, mode=None, path_on_compute: str=None, path_on_datastore: str=None, **kwargs) -> None: + super(OutputDataBinding, self).__init__(**kwargs) + self.datastore_id = datastore_id + self.mode = mode + self.path_on_compute = path_on_compute + self.path_on_datastore = path_on_datastore + + +class OutputPathAssetReference(AssetReferenceBase): + """Reference to an asset via its path in a job output. + + All required parameters must be populated in order to send to Azure. + + :param reference_type: Required. Constant filled by server. 
+ :type reference_type: str + :param job_id: ARM resource ID of the job. + :type job_id: str + :param path: The path of the file/directory in the job output. + :type path: str + """ + + _validation = { + 'reference_type': {'required': True}, + } + + _attribute_map = { + 'reference_type': {'key': 'referenceType', 'type': 'str'}, + 'job_id': {'key': 'jobId', 'type': 'str'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__(self, *, job_id: str=None, path: str=None, **kwargs) -> None: + super(OutputPathAssetReference, self).__init__(**kwargs) + self.job_id = job_id + self.path = path + self.reference_type = 'OutputPath' + + +class PartialOnlineDeployment(Model): + """Mutable online deployment configuration. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: PartialAksOnlineDeployment, PartialManagedOnlineDeployment + + All required parameters must be populated in order to send to Azure. + + :param app_insights_enabled: Whether AppInsights telemetry is enabled for + this online deployment. + :type app_insights_enabled: bool + :param liveness_probe: Deployment container liveness/readiness probe + configuration. + :type liveness_probe: + ~azure.mgmt.machinelearningservices.models.ProbeSettings + :param request_settings: Online deployment scoring requests configuration. + :type request_settings: + ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :param scale_settings: Online deployment scaling configuration. + :type scale_settings: + ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + :param endpoint_compute_type: Required. Constant filled by server. 
+ :type endpoint_compute_type: str + """ + + _validation = { + 'endpoint_compute_type': {'required': True}, + } + + _attribute_map = { + 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'}, + 'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'}, + 'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'}, + 'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'}, + 'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'}, + } + + _subtype_map = { + 'endpoint_compute_type': {'K8S': 'PartialAksOnlineDeployment', 'Managed': 'PartialManagedOnlineDeployment'} + } + + def __init__(self, *, app_insights_enabled: bool=None, liveness_probe=None, request_settings=None, scale_settings=None, **kwargs) -> None: + super(PartialOnlineDeployment, self).__init__(**kwargs) + self.app_insights_enabled = app_insights_enabled + self.liveness_probe = liveness_probe + self.request_settings = request_settings + self.scale_settings = scale_settings + self.endpoint_compute_type = None + + +class PartialAksOnlineDeployment(PartialOnlineDeployment): + """PartialAksOnlineDeployment. + + All required parameters must be populated in order to send to Azure. + + :param app_insights_enabled: Whether AppInsights telemetry is enabled for + this online deployment. + :type app_insights_enabled: bool + :param liveness_probe: Deployment container liveness/readiness probe + configuration. + :type liveness_probe: + ~azure.mgmt.machinelearningservices.models.ProbeSettings + :param request_settings: Online deployment scoring requests configuration. + :type request_settings: + ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :param scale_settings: Online deployment scaling configuration. + :type scale_settings: + ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + :param endpoint_compute_type: Required. Constant filled by server. 
+ :type endpoint_compute_type: str + :param container_resource_requirements: Resource requirements for each + container instance within an online deployment. + :type container_resource_requirements: + ~azure.mgmt.machinelearningservices.models.ContainerResourceRequirements + """ + + _validation = { + 'endpoint_compute_type': {'required': True}, + } + + _attribute_map = { + 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'}, + 'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'}, + 'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'}, + 'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'}, + 'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'}, + 'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'}, + } + + def __init__(self, *, app_insights_enabled: bool=None, liveness_probe=None, request_settings=None, scale_settings=None, container_resource_requirements=None, **kwargs) -> None: + super(PartialAksOnlineDeployment, self).__init__(app_insights_enabled=app_insights_enabled, liveness_probe=liveness_probe, request_settings=request_settings, scale_settings=scale_settings, **kwargs) + self.container_resource_requirements = container_resource_requirements + self.endpoint_compute_type = 'K8S' + + +class PartialBatchDeployment(Model): + """Mutable batch inference settings per deployment. + + :param description: Description of the endpoint deployment. + :type description: str + """ + + _attribute_map = { + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__(self, *, description: str=None, **kwargs) -> None: + super(PartialBatchDeployment, self).__init__(**kwargs) + self.description = description + + +class PartialBatchDeploymentPartialTrackedResource(Model): + """Strictly used in update requests. + + :param identity: Service identity associated with a resource. 
+ :type identity: + ~azure.mgmt.machinelearningservices.models.ResourceIdentity + :param kind: Metadata used by portal/tooling/etc to render different UX + experiences for resources of the same type. + :type kind: str + :param location: The geo-location where the resource lives. + :type location: str + :param properties: Additional attributes of the entity. + :type properties: + ~azure.mgmt.machinelearningservices.models.PartialBatchDeployment + :param tags: Resource tags. + :type tags: dict[str, str] + """ + + _attribute_map = { + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'PartialBatchDeployment'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, *, identity=None, kind: str=None, location: str=None, properties=None, tags=None, **kwargs) -> None: + super(PartialBatchDeploymentPartialTrackedResource, self).__init__(**kwargs) + self.identity = identity + self.kind = kind + self.location = location + self.properties = properties + self.tags = tags + + +class PartialBatchEndpoint(Model): + """Mutable Batch endpoint configuration. + + :param traffic: Traffic rules on how the traffic will be routed across + deployments. + :type traffic: dict[str, int] + """ + + _attribute_map = { + 'traffic': {'key': 'traffic', 'type': '{int}'}, + } + + def __init__(self, *, traffic=None, **kwargs) -> None: + super(PartialBatchEndpoint, self).__init__(**kwargs) + self.traffic = traffic + + +class PartialBatchEndpointPartialTrackedResource(Model): + """Strictly used in update requests. + + :param identity: Service identity associated with a resource. + :type identity: + ~azure.mgmt.machinelearningservices.models.ResourceIdentity + :param kind: Metadata used by portal/tooling/etc to render different UX + experiences for resources of the same type. 
+ :type kind: str + :param location: The geo-location where the resource lives. + :type location: str + :param properties: Additional attributes of the entity. + :type properties: + ~azure.mgmt.machinelearningservices.models.PartialBatchEndpoint + :param tags: Resource tags. + :type tags: dict[str, str] + """ + + _attribute_map = { + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'PartialBatchEndpoint'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, *, identity=None, kind: str=None, location: str=None, properties=None, tags=None, **kwargs) -> None: + super(PartialBatchEndpointPartialTrackedResource, self).__init__(**kwargs) + self.identity = identity + self.kind = kind + self.location = location + self.properties = properties + self.tags = tags + + +class PartialManagedOnlineDeployment(PartialOnlineDeployment): + """PartialManagedOnlineDeployment. + + All required parameters must be populated in order to send to Azure. + + :param app_insights_enabled: Whether AppInsights telemetry is enabled for + this online deployment. + :type app_insights_enabled: bool + :param liveness_probe: Deployment container liveness/readiness probe + configuration. + :type liveness_probe: + ~azure.mgmt.machinelearningservices.models.ProbeSettings + :param request_settings: Online deployment scoring requests configuration. + :type request_settings: + ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings + :param scale_settings: Online deployment scaling configuration. + :type scale_settings: + ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings + :param endpoint_compute_type: Required. Constant filled by server. + :type endpoint_compute_type: str + :param readiness_probe: Deployment container liveness/readiness probe + configuration. 
+ :type readiness_probe: + ~azure.mgmt.machinelearningservices.models.ProbeSettings + """ + + _validation = { + 'endpoint_compute_type': {'required': True}, + } + + _attribute_map = { + 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'}, + 'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'}, + 'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'}, + 'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'}, + 'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'}, + 'readiness_probe': {'key': 'readinessProbe', 'type': 'ProbeSettings'}, + } + + def __init__(self, *, app_insights_enabled: bool=None, liveness_probe=None, request_settings=None, scale_settings=None, readiness_probe=None, **kwargs) -> None: + super(PartialManagedOnlineDeployment, self).__init__(app_insights_enabled=app_insights_enabled, liveness_probe=liveness_probe, request_settings=request_settings, scale_settings=scale_settings, **kwargs) + self.readiness_probe = readiness_probe + self.endpoint_compute_type = 'Managed' + + +class PartialOnlineDeploymentPartialTrackedResource(Model): + """Strictly used in update requests. + + :param identity: Service identity associated with a resource. + :type identity: + ~azure.mgmt.machinelearningservices.models.ResourceIdentity + :param kind: Metadata used by portal/tooling/etc to render different UX + experiences for resources of the same type. + :type kind: str + :param location: The geo-location where the resource lives. + :type location: str + :param properties: Additional attributes of the entity. + :type properties: + ~azure.mgmt.machinelearningservices.models.PartialOnlineDeployment + :param tags: Resource tags. 
+ :type tags: dict[str, str] + """ + + _attribute_map = { + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'PartialOnlineDeployment'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, *, identity=None, kind: str=None, location: str=None, properties=None, tags=None, **kwargs) -> None: + super(PartialOnlineDeploymentPartialTrackedResource, self).__init__(**kwargs) + self.identity = identity + self.kind = kind + self.location = location + self.properties = properties + self.tags = tags + + +class PartialOnlineEndpoint(Model): + """Mutable online endpoint configuration. + + :param traffic: Traffic rules on how the traffic will be routed across + deployments. + :type traffic: dict[str, int] + """ + + _attribute_map = { + 'traffic': {'key': 'traffic', 'type': '{int}'}, + } + + def __init__(self, *, traffic=None, **kwargs) -> None: + super(PartialOnlineEndpoint, self).__init__(**kwargs) + self.traffic = traffic + + +class PartialOnlineEndpointPartialTrackedResource(Model): + """Strictly used in update requests. + + :param identity: Service identity associated with a resource. + :type identity: + ~azure.mgmt.machinelearningservices.models.ResourceIdentity + :param kind: Metadata used by portal/tooling/etc to render different UX + experiences for resources of the same type. + :type kind: str + :param location: The geo-location where the resource lives. + :type location: str + :param properties: Additional attributes of the entity. + :type properties: + ~azure.mgmt.machinelearningservices.models.PartialOnlineEndpoint + :param tags: Resource tags. 
+ :type tags: dict[str, str] + """ + + _attribute_map = { + 'identity': {'key': 'identity', 'type': 'ResourceIdentity'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'PartialOnlineEndpoint'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__(self, *, identity=None, kind: str=None, location: str=None, properties=None, tags=None, **kwargs) -> None: + super(PartialOnlineEndpointPartialTrackedResource, self).__init__(**kwargs) + self.identity = identity + self.kind = kind + self.location = location + self.properties = properties + self.tags = tags + + +class Password(Model): + """Password. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar name: + :vartype name: str + :ivar value: + :vartype value: str + """ + + _validation = { + 'name': {'readonly': True}, + 'value': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(Password, self).__init__(**kwargs) + self.name = None + self.value = None + + +class PersonalComputeInstanceSettings(Model): + """Settings for a personal compute instance. + + :param assigned_user: Assigned User. A user explicitly assigned to a + personal compute instance. + :type assigned_user: + ~azure.mgmt.machinelearningservices.models.AssignedUser + """ + + _attribute_map = { + 'assigned_user': {'key': 'assignedUser', 'type': 'AssignedUser'}, + } + + def __init__(self, *, assigned_user=None, **kwargs) -> None: + super(PersonalComputeInstanceSettings, self).__init__(**kwargs) + self.assigned_user = assigned_user + + +class PrivateEndpoint(Model): + """The Private Endpoint resource. + + Variables are only populated by the server, and will be ignored when + sending a request. 
+ + :ivar id: The ARM identifier for Private Endpoint + :vartype id: str + :ivar subnet_arm_id: The ARM identifier for Subnet resource that private + endpoint links to + :vartype subnet_arm_id: str + """ + + _validation = { + 'id': {'readonly': True}, + 'subnet_arm_id': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'subnet_arm_id': {'key': 'subnetArmId', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(PrivateEndpoint, self).__init__(**kwargs) + self.id = None + self.subnet_arm_id = None + + +class PrivateEndpointConnection(Resource): + """The Private Endpoint Connection resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :param private_endpoint: The resource of private end point. + :type private_endpoint: + ~azure.mgmt.machinelearningservices.models.PrivateEndpoint + :param private_link_service_connection_state: Required. A collection of + information about the state of the connection between service consumer and + provider. + :type private_link_service_connection_state: + ~azure.mgmt.machinelearningservices.models.PrivateLinkServiceConnectionState + :param provisioning_state: The provisioning state of the private endpoint + connection resource. 
Possible values include: 'Succeeded', 'Creating', + 'Deleting', 'Failed' + :type provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnectionProvisioningState + :param identity: The identity of the resource. + :type identity: ~azure.mgmt.machinelearningservices.models.Identity + :param location: Specifies the location of the resource. + :type location: str + :param tags: Contains resource tags defined as key/value pairs. + :type tags: dict[str, str] + :param sku: The sku of the workspace. + :type sku: ~azure.mgmt.machinelearningservices.models.Sku + :param system_data: + :type system_data: ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'private_link_service_connection_state': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'}, + 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, *, private_link_service_connection_state, private_endpoint=None, provisioning_state=None, identity=None, location: str=None, tags=None, sku=None, system_data=None, **kwargs) -> None: + super(PrivateEndpointConnection, self).__init__(**kwargs) + self.private_endpoint = private_endpoint + self.private_link_service_connection_state = private_link_service_connection_state + 
self.provisioning_state = provisioning_state + self.identity = identity + self.location = location + self.tags = tags + self.sku = sku + self.system_data = system_data + + +class PrivateLinkResource(Resource): + """A private link resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + :ivar group_id: The private link resource group id. + :vartype group_id: str + :ivar required_members: The private link resource required member names. + :vartype required_members: list[str] + :param required_zone_names: The private link resource Private link DNS + zone name. + :type required_zone_names: list[str] + :param identity: The identity of the resource. + :type identity: ~azure.mgmt.machinelearningservices.models.Identity + :param location: Specifies the location of the resource. + :type location: str + :param tags: Contains resource tags defined as key/value pairs. + :type tags: dict[str, str] + :param sku: The sku of the workspace. 
+ :type sku: ~azure.mgmt.machinelearningservices.models.Sku + :param system_data: + :type system_data: ~azure.mgmt.machinelearningservices.models.SystemData + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'group_id': {'readonly': True}, + 'required_members': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'group_id': {'key': 'properties.groupId', 'type': 'str'}, + 'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'}, + 'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + } + + def __init__(self, *, required_zone_names=None, identity=None, location: str=None, tags=None, sku=None, system_data=None, **kwargs) -> None: + super(PrivateLinkResource, self).__init__(**kwargs) + self.group_id = None + self.required_members = None + self.required_zone_names = required_zone_names + self.identity = identity + self.location = location + self.tags = tags + self.sku = sku + self.system_data = system_data + + +class PrivateLinkResourceListResult(Model): + """A list of private link resources. 
+ + :param value: Array of private link resources + :type value: + list[~azure.mgmt.machinelearningservices.models.PrivateLinkResource] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateLinkResource]'}, + } + + def __init__(self, *, value=None, **kwargs) -> None: + super(PrivateLinkResourceListResult, self).__init__(**kwargs) + self.value = value + + +class PrivateLinkServiceConnectionState(Model): + """A collection of information about the state of the connection between + service consumer and provider. + + :param status: Indicates whether the connection has been + Approved/Rejected/Removed by the owner of the service. Possible values + include: 'Pending', 'Approved', 'Rejected', 'Disconnected', 'Timeout' + :type status: str or + ~azure.mgmt.machinelearningservices.models.PrivateEndpointServiceConnectionStatus + :param description: The reason for approval/rejection of the connection. + :type description: str + :param actions_required: A message indicating if changes on the service + provider require any updates on the consumer. + :type actions_required: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, + } + + def __init__(self, *, status=None, description: str=None, actions_required: str=None, **kwargs) -> None: + super(PrivateLinkServiceConnectionState, self).__init__(**kwargs) + self.status = status + self.description = description + self.actions_required = actions_required + + +class ProbeSettings(Model): + """Deployment container liveness/readiness probe configuration. + + :param failure_threshold: The number of failures to allow before returning + an unhealthy status. + :type failure_threshold: int + :param initial_delay: The delay before the first probe in ISO 8601 format. + :type initial_delay: timedelta + :param period: The length of time between probes in ISO 8601 format. 
+ :type period: timedelta + :param success_threshold: The number of successful probes before returning + a healthy status. + :type success_threshold: int + :param timeout: The probe timeout in ISO 8601 format. + :type timeout: timedelta + """ + + _attribute_map = { + 'failure_threshold': {'key': 'failureThreshold', 'type': 'int'}, + 'initial_delay': {'key': 'initialDelay', 'type': 'duration'}, + 'period': {'key': 'period', 'type': 'duration'}, + 'success_threshold': {'key': 'successThreshold', 'type': 'int'}, + 'timeout': {'key': 'timeout', 'type': 'duration'}, + } + + def __init__(self, *, failure_threshold: int=None, initial_delay=None, period=None, success_threshold: int=None, timeout=None, **kwargs) -> None: + super(ProbeSettings, self).__init__(**kwargs) + self.failure_threshold = failure_threshold + self.initial_delay = initial_delay + self.period = period + self.success_threshold = success_threshold + self.timeout = timeout + + +class ProgressMetrics(Model): + """Progress metrics definition. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar completed_datapoint_count: The completed datapoint count. + :vartype completed_datapoint_count: long + :ivar incremental_dataset_last_refresh_time: The time of last successful + incremental dataset refresh in UTC. + :vartype incremental_dataset_last_refresh_time: datetime + :ivar skipped_datapoint_count: The skipped datapoint count. + :vartype skipped_datapoint_count: long + :ivar total_datapoint_count: The total datapoint count. 
+ :vartype total_datapoint_count: long + """ + + _validation = { + 'completed_datapoint_count': {'readonly': True}, + 'incremental_dataset_last_refresh_time': {'readonly': True}, + 'skipped_datapoint_count': {'readonly': True}, + 'total_datapoint_count': {'readonly': True}, + } + + _attribute_map = { + 'completed_datapoint_count': {'key': 'completedDatapointCount', 'type': 'long'}, + 'incremental_dataset_last_refresh_time': {'key': 'incrementalDatasetLastRefreshTime', 'type': 'iso-8601'}, + 'skipped_datapoint_count': {'key': 'skippedDatapointCount', 'type': 'long'}, + 'total_datapoint_count': {'key': 'totalDatapointCount', 'type': 'long'}, + } + + def __init__(self, **kwargs) -> None: + super(ProgressMetrics, self).__init__(**kwargs) + self.completed_datapoint_count = None + self.incremental_dataset_last_refresh_time = None + self.skipped_datapoint_count = None + self.total_datapoint_count = None + + +class ProxyResource(Resource): + """Proxy Resource. + + The resource model definition for a Azure Resource Manager proxy resource. + It will not have tags and a location. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} + :vartype id: str + :ivar name: The name of the resource + :vartype name: str + :ivar type: The type of the resource. E.g. 
+ "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(ProxyResource, self).__init__(**kwargs) + + +class PyTorch(DistributionConfiguration): + """PyTorch distribution configuration. + + All required parameters must be populated in order to send to Azure. + + :param distribution_type: Required. Constant filled by server. + :type distribution_type: str + :param process_count: Total process count for the distributed job. + :type process_count: int + """ + + _validation = { + 'distribution_type': {'required': True}, + } + + _attribute_map = { + 'distribution_type': {'key': 'distributionType', 'type': 'str'}, + 'process_count': {'key': 'processCount', 'type': 'int'}, + } + + def __init__(self, *, process_count: int=None, **kwargs) -> None: + super(PyTorch, self).__init__(**kwargs) + self.process_count = process_count + self.distribution_type = 'PyTorch' + + +class QuotaBaseProperties(Model): + """The properties for Quota update or retrieval. + + :param id: Specifies the resource ID. + :type id: str + :param type: Specifies the resource type. + :type type: str + :param limit: Limit. The maximum permitted quota of the resource. + :type limit: long + :param unit: An enum describing the unit of quota measurement. 
Possible + values include: 'Count' + :type unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'limit': {'key': 'limit', 'type': 'long'}, + 'unit': {'key': 'unit', 'type': 'str'}, + } + + def __init__(self, *, id: str=None, type: str=None, limit: int=None, unit=None, **kwargs) -> None: + super(QuotaBaseProperties, self).__init__(**kwargs) + self.id = id + self.type = type + self.limit = limit + self.unit = unit + + +class QuotaUpdateParameters(Model): + """Quota update parameters. + + :param value: The list for update quota. + :type value: + list[~azure.mgmt.machinelearningservices.models.QuotaBaseProperties] + :param location: Region of workspace quota to be updated. + :type location: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[QuotaBaseProperties]'}, + 'location': {'key': 'location', 'type': 'str'}, + } + + def __init__(self, *, value=None, location: str=None, **kwargs) -> None: + super(QuotaUpdateParameters, self).__init__(**kwargs) + self.value = value + self.location = location + + +class Recurrence(Model): + """The workflow trigger recurrence for ComputeStartStop schedule type. + + :param frequency: Possible values include: 'NotSpecified', 'Second', + 'Minute', 'Hour', 'Day', 'Week', 'Month', 'Year' + :type frequency: str or + ~azure.mgmt.machinelearningservices.models.RecurrenceFrequency + :param interval: The interval. + :type interval: int + :param start_time: The start time. + :type start_time: str + :param time_zone: The time zone. 
+ :type time_zone: str + :param schedule: + :type schedule: + ~azure.mgmt.machinelearningservices.models.RecurrenceSchedule + """ + + _attribute_map = { + 'frequency': {'key': 'frequency', 'type': 'str'}, + 'interval': {'key': 'interval', 'type': 'int'}, + 'start_time': {'key': 'startTime', 'type': 'str'}, + 'time_zone': {'key': 'timeZone', 'type': 'str'}, + 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, + } + + def __init__(self, *, frequency=None, interval: int=None, start_time: str=None, time_zone: str=None, schedule=None, **kwargs) -> None: + super(Recurrence, self).__init__(**kwargs) + self.frequency = frequency + self.interval = interval + self.start_time = start_time + self.time_zone = time_zone + self.schedule = schedule + + +class RecurrenceSchedule(Model): + """The recurrence schedule. + + :param minutes: The minutes. + :type minutes: list[int] + :param hours: The hours. + :type hours: list[int] + :param week_days: The days of the week. + :type week_days: list[str or + ~azure.mgmt.machinelearningservices.models.DaysOfWeek] + """ + + _attribute_map = { + 'minutes': {'key': 'minutes', 'type': '[int]'}, + 'hours': {'key': 'hours', 'type': '[int]'}, + 'week_days': {'key': 'weekDays', 'type': '[DaysOfWeek]'}, + } + + def __init__(self, *, minutes=None, hours=None, week_days=None, **kwargs) -> None: + super(RecurrenceSchedule, self).__init__(**kwargs) + self.minutes = minutes + self.hours = hours + self.week_days = week_days + + +class RegenerateEndpointKeysRequest(Model): + """RegenerateEndpointKeysRequest. + + All required parameters must be populated in order to send to Azure. + + :param key_type: Required. Specification for which type of key to + generate. Primary or Secondary. Possible values include: 'Primary', + 'Secondary' + :type key_type: str or ~azure.mgmt.machinelearningservices.models.KeyType + :param key_value: The value the key is set to. 
+ :type key_value: str + """ + + _validation = { + 'key_type': {'required': True}, + } + + _attribute_map = { + 'key_type': {'key': 'keyType', 'type': 'str'}, + 'key_value': {'key': 'keyValue', 'type': 'str'}, + } + + def __init__(self, *, key_type, key_value: str=None, **kwargs) -> None: + super(RegenerateEndpointKeysRequest, self).__init__(**kwargs) + self.key_type = key_type + self.key_value = key_value + + +class RegistryListCredentialsResult(Model): + """RegistryListCredentialsResult. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar location: + :vartype location: str + :ivar username: + :vartype username: str + :param passwords: + :type passwords: list[~azure.mgmt.machinelearningservices.models.Password] + """ + + _validation = { + 'location': {'readonly': True}, + 'username': {'readonly': True}, + } + + _attribute_map = { + 'location': {'key': 'location', 'type': 'str'}, + 'username': {'key': 'username', 'type': 'str'}, + 'passwords': {'key': 'passwords', 'type': '[Password]'}, + } + + def __init__(self, *, passwords=None, **kwargs) -> None: + super(RegistryListCredentialsResult, self).__init__(**kwargs) + self.location = None + self.username = None + self.passwords = passwords + + +class ResourceId(Model): + """Represents a resource ID. For example, for a subnet, it is the resource URL + for the subnet. + + All required parameters must be populated in order to send to Azure. + + :param id: Required. The ID of the resource + :type id: str + """ + + _validation = { + 'id': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__(self, *, id: str, **kwargs) -> None: + super(ResourceId, self).__init__(**kwargs) + self.id = id + + +class ResourceIdentity(Model): + """Service identity associated with a resource. + + Variables are only populated by the server, and will be ignored when + sending a request. 
+ + :ivar principal_id: Client ID that is used when authenticating. + :vartype principal_id: str + :ivar tenant_id: AAD Tenant where this identity lives. + :vartype tenant_id: str + :param type: Defines values for a ResourceIdentity's type. Possible values + include: 'SystemAssigned', 'UserAssigned', 'SystemAssigned,UserAssigned', + 'None' + :type type: str or + ~azure.mgmt.machinelearningservices.models.ResourceIdentityAssignment + :param user_assigned_identities: Dictionary of the user assigned + identities, key is ARM resource ID of the UAI. + :type user_assigned_identities: dict[str, + ~azure.mgmt.machinelearningservices.models.UserAssignedIdentityMeta] + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentityMeta}'}, + } + + def __init__(self, *, type=None, user_assigned_identities=None, **kwargs) -> None: + super(ResourceIdentity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + self.type = type + self.user_assigned_identities = user_assigned_identities + + +class ResourceName(Model): + """The Resource Name. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar value: The name of the resource. + :vartype value: str + :ivar localized_value: The localized name of the resource. 
+ :vartype localized_value: str + """ + + _validation = { + 'value': {'readonly': True}, + 'localized_value': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': 'str'}, + 'localized_value': {'key': 'localizedValue', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(ResourceName, self).__init__(**kwargs) + self.value = None + self.localized_value = None + + +class ResourceQuota(Model): + """The quota assigned to a resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: Specifies the resource ID. + :vartype id: str + :ivar aml_workspace_location: Region of the AML workspace in the id. + :vartype aml_workspace_location: str + :ivar type: Specifies the resource type. + :vartype type: str + :ivar name: Name of the resource. + :vartype name: ~azure.mgmt.machinelearningservices.models.ResourceName + :ivar limit: Limit. The maximum permitted quota of the resource. + :vartype limit: long + :ivar unit: An enum describing the unit of quota measurement. 
Possible + values include: 'Count' + :vartype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit + """ + + _validation = { + 'id': {'readonly': True}, + 'aml_workspace_location': {'readonly': True}, + 'type': {'readonly': True}, + 'name': {'readonly': True}, + 'limit': {'readonly': True}, + 'unit': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'aml_workspace_location': {'key': 'amlWorkspaceLocation', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'ResourceName'}, + 'limit': {'key': 'limit', 'type': 'long'}, + 'unit': {'key': 'unit', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(ResourceQuota, self).__init__(**kwargs) + self.id = None + self.aml_workspace_location = None + self.type = None + self.name = None + self.limit = None + self.unit = None + + +class ResourceSkuLocationInfo(Model): + """ResourceSkuLocationInfo. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar location: Location of the SKU + :vartype location: str + :ivar zones: List of availability zones where the SKU is supported. + :vartype zones: list[str] + :ivar zone_details: Details of capabilities available to a SKU in specific + zones. + :vartype zone_details: + list[~azure.mgmt.machinelearningservices.models.ResourceSkuZoneDetails] + """ + + _validation = { + 'location': {'readonly': True}, + 'zones': {'readonly': True}, + 'zone_details': {'readonly': True}, + } + + _attribute_map = { + 'location': {'key': 'location', 'type': 'str'}, + 'zones': {'key': 'zones', 'type': '[str]'}, + 'zone_details': {'key': 'zoneDetails', 'type': '[ResourceSkuZoneDetails]'}, + } + + def __init__(self, **kwargs) -> None: + super(ResourceSkuLocationInfo, self).__init__(**kwargs) + self.location = None + self.zones = None + self.zone_details = None + + +class ResourceSkuZoneDetails(Model): + """Describes The zonal capabilities of a SKU. 
+ + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar name: The set of zones that the SKU is available in with the + specified capabilities. + :vartype name: list[str] + :ivar capabilities: A list of capabilities that are available for the SKU + in the specified list of zones. + :vartype capabilities: + list[~azure.mgmt.machinelearningservices.models.SKUCapability] + """ + + _validation = { + 'name': {'readonly': True}, + 'capabilities': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': '[str]'}, + 'capabilities': {'key': 'capabilities', 'type': '[SKUCapability]'}, + } + + def __init__(self, **kwargs) -> None: + super(ResourceSkuZoneDetails, self).__init__(**kwargs) + self.name = None + self.capabilities = None + + +class Restriction(Model): + """The restriction because of which SKU cannot be used. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar type: The type of restrictions. As of now only possible value for + this is location. + :vartype type: str + :ivar values: The value of restrictions. If the restriction type is set to + location. This would be different locations where the SKU is restricted. + :vartype values: list[str] + :param reason_code: The reason for the restriction. 
Possible values + include: 'NotSpecified', 'NotAvailableForRegion', + 'NotAvailableForSubscription' + :type reason_code: str or + ~azure.mgmt.machinelearningservices.models.ReasonCode + """ + + _validation = { + 'type': {'readonly': True}, + 'values': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'values': {'key': 'values', 'type': '[str]'}, + 'reason_code': {'key': 'reasonCode', 'type': 'str'}, + } + + def __init__(self, *, reason_code=None, **kwargs) -> None: + super(Restriction, self).__init__(**kwargs) + self.type = None + self.values = None + self.reason_code = reason_code + + +class Route(Model): + """Route. + + All required parameters must be populated in order to send to Azure. + + :param path: Required. The path for the route. + :type path: str + :param port: Required. The port for the route. + :type port: int + """ + + _validation = { + 'path': {'required': True, 'pattern': r'[a-zA-Z0-9_]'}, + 'port': {'required': True}, + } + + _attribute_map = { + 'path': {'key': 'path', 'type': 'str'}, + 'port': {'key': 'port', 'type': 'int'}, + } + + def __init__(self, *, path: str, port: int, **kwargs) -> None: + super(Route, self).__init__(**kwargs) + self.path = path + self.port = port + + +class SasDatastoreCredentials(DatastoreCredentials): + """SAS datastore credentials configuration. + + All required parameters must be populated in order to send to Azure. + + :param credentials_type: Required. Constant filled by server. + :type credentials_type: str + :param secrets: Storage container secrets. 
+ :type secrets: + ~azure.mgmt.machinelearningservices.models.SasDatastoreSecrets + """ + + _validation = { + 'credentials_type': {'required': True}, + } + + _attribute_map = { + 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, + 'secrets': {'key': 'secrets', 'type': 'SasDatastoreSecrets'}, + } + + def __init__(self, *, secrets=None, **kwargs) -> None: + super(SasDatastoreCredentials, self).__init__(**kwargs) + self.secrets = secrets + self.credentials_type = 'Sas' + + +class SasDatastoreSecrets(DatastoreSecrets): + """Datastore SAS secrets. + + All required parameters must be populated in order to send to Azure. + + :param secrets_type: Required. Constant filled by server. + :type secrets_type: str + :param sas_token: Storage container SAS token. + :type sas_token: str + """ + + _validation = { + 'secrets_type': {'required': True}, + } + + _attribute_map = { + 'secrets_type': {'key': 'secretsType', 'type': 'str'}, + 'sas_token': {'key': 'sasToken', 'type': 'str'}, + } + + def __init__(self, *, sas_token: str=None, **kwargs) -> None: + super(SasDatastoreSecrets, self).__init__(**kwargs) + self.sas_token = sas_token + self.secrets_type = 'Sas' + + +class ScaleSettings(Model): + """scale settings for AML Compute. + + All required parameters must be populated in order to send to Azure. + + :param max_node_count: Required. Max number of nodes to use + :type max_node_count: int + :param min_node_count: Min number of nodes to use. Default value: 0 . + :type min_node_count: int + :param node_idle_time_before_scale_down: Node Idle Time before scaling + down amlCompute. This string needs to be in the RFC Format. 
+ :type node_idle_time_before_scale_down: timedelta + """ + + _validation = { + 'max_node_count': {'required': True}, + } + + _attribute_map = { + 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'}, + 'min_node_count': {'key': 'minNodeCount', 'type': 'int'}, + 'node_idle_time_before_scale_down': {'key': 'nodeIdleTimeBeforeScaleDown', 'type': 'duration'}, + } + + def __init__(self, *, max_node_count: int, min_node_count: int=0, node_idle_time_before_scale_down=None, **kwargs) -> None: + super(ScaleSettings, self).__init__(**kwargs) + self.max_node_count = max_node_count + self.min_node_count = min_node_count + self.node_idle_time_before_scale_down = node_idle_time_before_scale_down + + +class ScriptReference(Model): + """Script reference. + + :param script_source: The storage source of the script: inline, workspace. + :type script_source: str + :param script_data: The location of scripts in the mounted volume. + :type script_data: str + :param script_arguments: Optional command line arguments passed to the + script to run. + :type script_arguments: str + :param timeout: Optional time period passed to timeout command. + :type timeout: str + """ + + _attribute_map = { + 'script_source': {'key': 'scriptSource', 'type': 'str'}, + 'script_data': {'key': 'scriptData', 'type': 'str'}, + 'script_arguments': {'key': 'scriptArguments', 'type': 'str'}, + 'timeout': {'key': 'timeout', 'type': 'str'}, + } + + def __init__(self, *, script_source: str=None, script_data: str=None, script_arguments: str=None, timeout: str=None, **kwargs) -> None: + super(ScriptReference, self).__init__(**kwargs) + self.script_source = script_source + self.script_data = script_data + self.script_arguments = script_arguments + self.timeout = timeout + + +class ScriptsToExecute(Model): + """Customized setup scripts. + + :param startup_script: Script that's run every time the machine starts. 
+ :type startup_script: + ~azure.mgmt.machinelearningservices.models.ScriptReference + :param creation_script: Script that's run only once during provision of + the compute. + :type creation_script: + ~azure.mgmt.machinelearningservices.models.ScriptReference + """ + + _attribute_map = { + 'startup_script': {'key': 'startupScript', 'type': 'ScriptReference'}, + 'creation_script': {'key': 'creationScript', 'type': 'ScriptReference'}, + } + + def __init__(self, *, startup_script=None, creation_script=None, **kwargs) -> None: + super(ScriptsToExecute, self).__init__(**kwargs) + self.startup_script = startup_script + self.creation_script = creation_script + + +class ServiceManagedResourcesSettings(Model): + """ServiceManagedResourcesSettings. + + :param cosmos_db: The settings for the service managed cosmosdb account. + :type cosmos_db: + ~azure.mgmt.machinelearningservices.models.CosmosDbSettings + """ + + _attribute_map = { + 'cosmos_db': {'key': 'cosmosDb', 'type': 'CosmosDbSettings'}, + } + + def __init__(self, *, cosmos_db=None, **kwargs) -> None: + super(ServiceManagedResourcesSettings, self).__init__(**kwargs) + self.cosmos_db = cosmos_db + + +class ServicePrincipalCredentials(Model): + """Service principal credentials. + + All required parameters must be populated in order to send to Azure. + + :param client_id: Required. Client Id + :type client_id: str + :param client_secret: Required. 
Client secret + :type client_secret: str + """ + + _validation = { + 'client_id': {'required': True}, + 'client_secret': {'required': True}, + } + + _attribute_map = { + 'client_id': {'key': 'clientId', 'type': 'str'}, + 'client_secret': {'key': 'clientSecret', 'type': 'str'}, + } + + def __init__(self, *, client_id: str, client_secret: str, **kwargs) -> None: + super(ServicePrincipalCredentials, self).__init__(**kwargs) + self.client_id = client_id + self.client_secret = client_secret + + +class ServicePrincipalDatastoreCredentials(DatastoreCredentials): + """Service Principal datastore credentials configuration. + + All required parameters must be populated in order to send to Azure. + + :param credentials_type: Required. Constant filled by server. + :type credentials_type: str + :param authority_url: Authority URL used for authentication. + :type authority_url: str + :param client_id: Required. Service principal client ID. + :type client_id: str + :param resource_uri: Resource the service principal has access to. + :type resource_uri: str + :param secrets: Service principal secrets. + :type secrets: + ~azure.mgmt.machinelearningservices.models.ServicePrincipalDatastoreSecrets + :param tenant_id: Required. ID of the tenant to which the service + principal belongs. 
+ :type tenant_id: str + """ + + _validation = { + 'credentials_type': {'required': True}, + 'client_id': {'required': True}, + 'tenant_id': {'required': True}, + } + + _attribute_map = { + 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, + 'authority_url': {'key': 'authorityUrl', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + 'resource_uri': {'key': 'resourceUri', 'type': 'str'}, + 'secrets': {'key': 'secrets', 'type': 'ServicePrincipalDatastoreSecrets'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + } + + def __init__(self, *, client_id: str, tenant_id: str, authority_url: str=None, resource_uri: str=None, secrets=None, **kwargs) -> None: + super(ServicePrincipalDatastoreCredentials, self).__init__(**kwargs) + self.authority_url = authority_url + self.client_id = client_id + self.resource_uri = resource_uri + self.secrets = secrets + self.tenant_id = tenant_id + self.credentials_type = 'ServicePrincipal' + + +class ServicePrincipalDatastoreSecrets(DatastoreSecrets): + """Datastore Service Principal secrets. + + All required parameters must be populated in order to send to Azure. + + :param secrets_type: Required. Constant filled by server. + :type secrets_type: str + :param client_secret: Service principal secret. + :type client_secret: str + """ + + _validation = { + 'secrets_type': {'required': True}, + } + + _attribute_map = { + 'secrets_type': {'key': 'secretsType', 'type': 'str'}, + 'client_secret': {'key': 'clientSecret', 'type': 'str'}, + } + + def __init__(self, *, client_secret: str=None, **kwargs) -> None: + super(ServicePrincipalDatastoreSecrets, self).__init__(**kwargs) + self.client_secret = client_secret + self.secrets_type = 'ServicePrincipal' + + +class SetupScripts(Model): + """Details of customized scripts to execute for setting up the cluster. 
class SharedPrivateLinkResource(Model):
    """SharedPrivateLinkResource.

    :param name: Unique name of the private link.
    :type name: str
    :param private_link_resource_id: The resource id that private link links
     to.
    :type private_link_resource_id: str
    :param group_id: The private link resource group id.
    :type group_id: str
    :param request_message: Request message.
    :type request_message: str
    :param status: Indicates whether the connection has been
     Approved/Rejected/Removed by the owner of the service. Possible values
     include: 'Pending', 'Approved', 'Rejected', 'Disconnected', 'Timeout'
    :type status: str or
     ~azure.mgmt.machinelearningservices.models.PrivateEndpointServiceConnectionStatus
    """

    # "properties.*" keys flatten the nested ARM properties envelope.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'},
        'group_id': {'key': 'properties.groupId', 'type': 'str'},
        'request_message': {'key': 'properties.requestMessage', 'type': 'str'},
        'status': {'key': 'properties.status', 'type': 'str'},
    }

    def __init__(self, *, name: str=None, private_link_resource_id: str=None, group_id: str=None, request_message: str=None, status=None, **kwargs) -> None:
        super(SharedPrivateLinkResource, self).__init__(**kwargs)
        self.name = name
        self.private_link_resource_id = private_link_resource_id
        self.group_id = group_id
        self.request_message = request_message
        self.status = status


class Sku(Model):
    """Sku of the resource.

    :param name: Name of the sku
    :type name: str
    :param tier: Tier of the sku like Basic or Enterprise
    :type tier: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'tier': {'key': 'tier', 'type': 'str'},
    }

    def __init__(self, *, name: str=None, tier: str=None, **kwargs) -> None:
        super(Sku, self).__init__(**kwargs)
        self.name = name
        self.tier = tier


class SKUCapability(Model):
    """Features/user capabilities associated with the sku.

    :param name: Capability/Feature ID
    :type name: str
    :param value: Details about the feature/capability
    :type value: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
    }

    def __init__(self, *, name: str=None, value: str=None, **kwargs) -> None:
        super(SKUCapability, self).__init__(**kwargs)
        self.name = name
        self.value = value


class SqlAdminDatastoreCredentials(DatastoreCredentials):
    """SQL Admin datastore credentials configuration.

    All required parameters must be populated in order to send to Azure.

    :param credentials_type: Required. Constant filled by server.
    :type credentials_type: str
    :param secrets: SQL database secrets.
    :type secrets:
     ~azure.mgmt.machinelearningservices.models.SqlAdminDatastoreSecrets
    :param user_id: Required. SQL database user name.
    :type user_id: str
    """

    _validation = {
        'credentials_type': {'required': True},
        'user_id': {'required': True, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'credentials_type': {'key': 'credentialsType', 'type': 'str'},
        'secrets': {'key': 'secrets', 'type': 'SqlAdminDatastoreSecrets'},
        'user_id': {'key': 'userId', 'type': 'str'},
    }

    def __init__(self, *, user_id: str, secrets=None, **kwargs) -> None:
        super(SqlAdminDatastoreCredentials, self).__init__(**kwargs)
        self.secrets = secrets
        self.user_id = user_id
        # Polymorphic discriminator: fixed by the service contract.
        self.credentials_type = 'SqlAdmin'
class SqlAdminDatastoreSecrets(DatastoreSecrets):
    """Datastore SQL Admin secrets.

    All required parameters must be populated in order to send to Azure.

    :param secrets_type: Required. Constant filled by server.
    :type secrets_type: str
    :param password: SQL database password.
    :type password: str
    """

    _validation = {
        'secrets_type': {'required': True},
    }

    _attribute_map = {
        'secrets_type': {'key': 'secretsType', 'type': 'str'},
        'password': {'key': 'password', 'type': 'str'},
    }

    def __init__(self, *, password: str=None, **kwargs) -> None:
        super(SqlAdminDatastoreSecrets, self).__init__(**kwargs)
        self.password = password
        # Polymorphic discriminator: fixed by the service contract.
        self.secrets_type = 'SqlAdmin'


class SslConfiguration(Model):
    """The ssl configuration for scoring.

    :param status: Enable or disable ssl for scoring. Possible values include:
     'Disabled', 'Enabled', 'Auto'
    :type status: str or ~azure.mgmt.machinelearningservices.models.enum
    :param cert: Cert data
    :type cert: str
    :param key: Key data
    :type key: str
    :param cname: CNAME of the cert
    :type cname: str
    :param leaf_domain_label: Leaf domain label of public endpoint
    :type leaf_domain_label: str
    :param overwrite_existing_domain: Indicates whether to overwrite existing
     domain label.
    :type overwrite_existing_domain: bool
    """

    _attribute_map = {
        'status': {'key': 'status', 'type': 'str'},
        'cert': {'key': 'cert', 'type': 'str'},
        'key': {'key': 'key', 'type': 'str'},
        'cname': {'key': 'cname', 'type': 'str'},
        'leaf_domain_label': {'key': 'leafDomainLabel', 'type': 'str'},
        'overwrite_existing_domain': {'key': 'overwriteExistingDomain', 'type': 'bool'},
    }

    def __init__(self, *, status=None, cert: str=None, key: str=None, cname: str=None, leaf_domain_label: str=None, overwrite_existing_domain: bool=None, **kwargs) -> None:
        super(SslConfiguration, self).__init__(**kwargs)
        self.status = status
        self.cert = cert
        self.key = key
        self.cname = cname
        self.leaf_domain_label = leaf_domain_label
        self.overwrite_existing_domain = overwrite_existing_domain


class StatusMessage(Model):
    """Active message associated with project.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar code: Service-defined message code.
    :vartype code: str
    :ivar created_time_utc: Time in UTC at which the message was created.
    :vartype created_time_utc: datetime
    :ivar level: Severity level of message. Possible values include: 'Error',
     'Information', 'Warning'
    :vartype level: str or
     ~azure.mgmt.machinelearningservices.models.StatusMessageLevel
    :ivar message: A human-readable representation of the message code.
    :vartype message: str
    """

    _validation = {
        'code': {'readonly': True},
        'created_time_utc': {'readonly': True},
        'level': {'readonly': True},
        'message': {'readonly': True},
    }

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'created_time_utc': {'key': 'createdTimeUtc', 'type': 'iso-8601'},
        'level': {'key': 'level', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
    }

    def __init__(self, **kwargs) -> None:
        super(StatusMessage, self).__init__(**kwargs)
        # All fields are server-populated (read-only).
        self.code = None
        self.created_time_utc = None
        self.level = None
        self.message = None
class SweepJob(JobBase):
    """Sweep job definition.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :param description: The asset description text.
    :type description: str
    :ivar interaction_endpoints: List of JobEndpoints.
     For local jobs, a job endpoint will have an endpoint value of
     FileStreamObject.
    :vartype interaction_endpoints: dict[str,
     ~azure.mgmt.machinelearningservices.models.JobEndpoint]
    :param properties: The asset property dictionary.
    :type properties: dict[str, str]
    :ivar provisioning_state: Specifies the job provisioning state. Possible
     values include: 'Succeeded', 'Failed', 'Canceled', 'InProgress'
    :vartype provisioning_state: str or
     ~azure.mgmt.machinelearningservices.models.JobProvisioningState
    :param tags: Tag dictionary. Tags can be added, removed, and updated.
    :type tags: dict[str, str]
    :param job_type: Required. Constant filled by server.
    :type job_type: str
    :param algorithm: Required. Type of the hyperparameter sampling
     algorithms. Possible values include: 'Grid', 'Random', 'Bayesian'
    :type algorithm: str or
     ~azure.mgmt.machinelearningservices.models.SamplingAlgorithm
    :param compute: Required. Compute binding for the job.
    :type compute:
     ~azure.mgmt.machinelearningservices.models.ComputeConfiguration
    :param early_termination: Early termination policies enable canceling
     poor-performing runs before they complete.
    :type early_termination:
     ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy
    :param experiment_name: The name of the experiment the job belongs to. If
     not set, the job is placed in the "Default" experiment.
    :type experiment_name: str
    :param identity: Identity configuration. If set, this should be one of
     AmlToken, ManagedIdentity or null.
     Defaults to AmlToken if null.
    :type identity:
     ~azure.mgmt.machinelearningservices.models.IdentityConfiguration
    :param max_concurrent_trials: An upper bound on the number of trials
     performed in parallel.
    :type max_concurrent_trials: int
    :param max_total_trials: An upper bound on the number of trials to
     perform.
    :type max_total_trials: int
    :param objective: Required. Optimization objective.
    :type objective: ~azure.mgmt.machinelearningservices.models.Objective
    :ivar output: Location of the job output logs and artifacts.
    :vartype output: ~azure.mgmt.machinelearningservices.models.JobOutput
    :param priority: Job priority for scheduling policy. Only applies to
     AMLCompute.
     Private preview feature and only available to users on the allow list.
    :type priority: int
    :param search_space: Required. A dictionary containing each parameter and
     its distribution. The dictionary key is the name of the parameter
    :type search_space: dict[str, object]
    :ivar status: The status of a job. Possible values include: 'NotStarted',
     'Starting', 'Provisioning', 'Preparing', 'Queued', 'Running',
     'Finalizing', 'CancelRequested', 'Completed', 'Failed', 'Canceled',
     'NotResponding', 'Paused', 'Unknown'
    :vartype status: str or
     ~azure.mgmt.machinelearningservices.models.JobStatus
    :param timeout: The total timeout in ISO 8601 format. Only supports
     duration with precision as low as Minutes.
    :type timeout: timedelta
    :param trial: Trial component definition.
    :type trial: ~azure.mgmt.machinelearningservices.models.TrialComponent
    """

    _validation = {
        'interaction_endpoints': {'readonly': True},
        'provisioning_state': {'readonly': True},
        'job_type': {'required': True},
        'algorithm': {'required': True},
        'compute': {'required': True},
        'objective': {'required': True},
        'output': {'readonly': True},
        'search_space': {'required': True},
        'status': {'readonly': True},
    }

    _attribute_map = {
        'description': {'key': 'description', 'type': 'str'},
        'interaction_endpoints': {'key': 'interactionEndpoints', 'type': '{JobEndpoint}'},
        'properties': {'key': 'properties', 'type': '{str}'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'job_type': {'key': 'jobType', 'type': 'str'},
        'algorithm': {'key': 'algorithm', 'type': 'str'},
        'compute': {'key': 'compute', 'type': 'ComputeConfiguration'},
        'early_termination': {'key': 'earlyTermination', 'type': 'EarlyTerminationPolicy'},
        'experiment_name': {'key': 'experimentName', 'type': 'str'},
        'identity': {'key': 'identity', 'type': 'IdentityConfiguration'},
        'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'},
        'max_total_trials': {'key': 'maxTotalTrials', 'type': 'int'},
        'objective': {'key': 'objective', 'type': 'Objective'},
        'output': {'key': 'output', 'type': 'JobOutput'},
        'priority': {'key': 'priority', 'type': 'int'},
        'search_space': {'key': 'searchSpace', 'type': '{object}'},
        'status': {'key': 'status', 'type': 'str'},
        'timeout': {'key': 'timeout', 'type': 'duration'},
        'trial': {'key': 'trial', 'type': 'TrialComponent'},
    }

    def __init__(self, *, algorithm, compute, objective, search_space, description: str=None, properties=None, tags=None, early_termination=None, experiment_name: str=None, identity=None, max_concurrent_trials: int=None, max_total_trials: int=None, priority: int=None, timeout=None, trial=None, **kwargs) -> None:
        super(SweepJob, self).__init__(description=description, properties=properties, tags=tags, **kwargs)
        self.algorithm = algorithm
        self.compute = compute
        self.early_termination = early_termination
        self.experiment_name = experiment_name
        self.identity = identity
        self.max_concurrent_trials = max_concurrent_trials
        self.max_total_trials = max_total_trials
        self.objective = objective
        self.output = None  # server-populated
        self.priority = priority
        self.search_space = search_space
        self.status = None  # server-populated
        self.timeout = timeout
        self.trial = trial
        # Polymorphic discriminator: fixed by the service contract.
        self.job_type = 'Sweep'
class SynapseSpark(Model):
    """A SynapseSpark compute.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :param compute_type: Required. The type of compute. Possible values
     include: 'AKS', 'AmlCompute', 'ComputeInstance', 'DataFactory',
     'VirtualMachine', 'HDInsight', 'Databricks', 'DataLakeAnalytics',
     'SynapseSpark'
    :type compute_type: str or
     ~azure.mgmt.machinelearningservices.models.ComputeType
    :param compute_location: Location for the underlying compute
    :type compute_location: str
    :ivar provisioning_state: The provision state of the cluster. Valid values
     are Unknown, Updating, Provisioning, Succeeded, and Failed. Possible
     values include: 'Unknown', 'Updating', 'Creating', 'Deleting',
     'Succeeded', 'Failed', 'Canceled'
    :vartype provisioning_state: str or
     ~azure.mgmt.machinelearningservices.models.ProvisioningState
    :param description: The description of the Machine Learning compute.
    :type description: str
    :ivar created_on: The time at which the compute was created.
    :vartype created_on: datetime
    :ivar modified_on: The time at which the compute was last modified.
    :vartype modified_on: datetime
    :param resource_id: ARM resource id of the underlying compute
    :type resource_id: str
    :ivar provisioning_errors: Errors during provisioning
    :vartype provisioning_errors:
     list[~azure.mgmt.machinelearningservices.models.ErrorResponse]
    :ivar is_attached_compute: Indicating whether the compute was provisioned
     by user and brought from outside if true, or machine learning service
     provisioned it if false.
    :vartype is_attached_compute: bool
    :param disable_local_auth: Opt-out of local authentication and ensure
     customers can use only MSI and AAD exclusively for authentication.
    :type disable_local_auth: bool
    :param properties: Synapse Spark properties.
    :type properties:
     ~azure.mgmt.machinelearningservices.models.SynapseSparkProperties
    """

    _validation = {
        'compute_type': {'required': True},
        'provisioning_state': {'readonly': True},
        'created_on': {'readonly': True},
        'modified_on': {'readonly': True},
        'provisioning_errors': {'readonly': True},
        'is_attached_compute': {'readonly': True},
    }

    _attribute_map = {
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'compute_location': {'key': 'computeLocation', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
        'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
        'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
        'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
        'properties': {'key': 'properties', 'type': 'SynapseSparkProperties'},
    }

    def __init__(self, *, compute_type, compute_location: str=None, description: str=None, resource_id: str=None, disable_local_auth: bool=None, properties=None, **kwargs) -> None:
        super(SynapseSpark, self).__init__(**kwargs)
        self.compute_type = compute_type
        self.compute_location = compute_location
        self.provisioning_state = None  # server-populated
        self.description = description
        self.created_on = None  # server-populated
        self.modified_on = None  # server-populated
        self.resource_id = resource_id
        self.provisioning_errors = None  # server-populated
        self.is_attached_compute = None  # server-populated
        self.disable_local_auth = disable_local_auth
        self.properties = properties


class SynapseSparkPoolProperties(Model):
    """Properties specific to Synapse Spark pools.

    :param properties: Synapse Spark pool properties.
    :type properties:
     ~azure.mgmt.machinelearningservices.models.SynapseSparkProperties
    """

    _attribute_map = {
        'properties': {'key': 'properties', 'type': 'SynapseSparkProperties'},
    }

    def __init__(self, *, properties=None, **kwargs) -> None:
        super(SynapseSparkPoolProperties, self).__init__(**kwargs)
        self.properties = properties
class SynapseSparkProperties(Model):
    """Synapse Spark compute properties.

    :param auto_scale_properties: Auto scale properties.
    :type auto_scale_properties:
     ~azure.mgmt.machinelearningservices.models.AutoScaleProperties
    :param auto_pause_properties: Auto pause properties.
    :type auto_pause_properties:
     ~azure.mgmt.machinelearningservices.models.AutoPauseProperties
    :param spark_version: Spark version.
    :type spark_version: str
    :param node_count: The number of compute nodes currently assigned to the
     compute.
    :type node_count: int
    :param node_size: Node size.
    :type node_size: str
    :param node_size_family: Node size family.
    :type node_size_family: str
    :param subscription_id: Azure subscription identifier.
    :type subscription_id: str
    :param resource_group: Name of the resource group in which workspace is
     located.
    :type resource_group: str
    :param workspace_name: Name of Azure Machine Learning workspace.
    :type workspace_name: str
    :param pool_name: Pool name.
    :type pool_name: str
    """

    _attribute_map = {
        'auto_scale_properties': {'key': 'autoScaleProperties', 'type': 'AutoScaleProperties'},
        'auto_pause_properties': {'key': 'autoPauseProperties', 'type': 'AutoPauseProperties'},
        'spark_version': {'key': 'sparkVersion', 'type': 'str'},
        'node_count': {'key': 'nodeCount', 'type': 'int'},
        'node_size': {'key': 'nodeSize', 'type': 'str'},
        'node_size_family': {'key': 'nodeSizeFamily', 'type': 'str'},
        'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
        'resource_group': {'key': 'resourceGroup', 'type': 'str'},
        'workspace_name': {'key': 'workspaceName', 'type': 'str'},
        'pool_name': {'key': 'poolName', 'type': 'str'},
    }

    def __init__(self, *, auto_scale_properties=None, auto_pause_properties=None, spark_version: str=None, node_count: int=None, node_size: str=None, node_size_family: str=None, subscription_id: str=None, resource_group: str=None, workspace_name: str=None, pool_name: str=None, **kwargs) -> None:
        super(SynapseSparkProperties, self).__init__(**kwargs)
        self.auto_scale_properties = auto_scale_properties
        self.auto_pause_properties = auto_pause_properties
        self.spark_version = spark_version
        self.node_count = node_count
        self.node_size = node_size
        self.node_size_family = node_size_family
        self.subscription_id = subscription_id
        self.resource_group = resource_group
        self.workspace_name = workspace_name
        self.pool_name = pool_name


class SystemData(Model):
    """Metadata pertaining to creation and last modification of the resource.

    :param created_by: The identity that created the resource.
    :type created_by: str
    :param created_by_type: The type of identity that created the resource.
     Possible values include: 'User', 'Application', 'ManagedIdentity', 'Key'
    :type created_by_type: str or
     ~azure.mgmt.machinelearningservices.models.CreatedByType
    :param created_at: The timestamp of resource creation (UTC).
    :type created_at: datetime
    :param last_modified_by: The identity that last modified the resource.
    :type last_modified_by: str
    :param last_modified_by_type: The type of identity that last modified the
     resource. Possible values include: 'User', 'Application',
     'ManagedIdentity', 'Key'
    :type last_modified_by_type: str or
     ~azure.mgmt.machinelearningservices.models.CreatedByType
    :param last_modified_at: The timestamp of resource last modification (UTC)
    :type last_modified_at: datetime
    """

    _attribute_map = {
        'created_by': {'key': 'createdBy', 'type': 'str'},
        'created_by_type': {'key': 'createdByType', 'type': 'str'},
        'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
        'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'},
        'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'},
        'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'},
    }

    def __init__(self, *, created_by: str=None, created_by_type=None, created_at=None, last_modified_by: str=None, last_modified_by_type=None, last_modified_at=None, **kwargs) -> None:
        super(SystemData, self).__init__(**kwargs)
        self.created_by = created_by
        self.created_by_type = created_by_type
        self.created_at = created_at
        self.last_modified_by = last_modified_by
        self.last_modified_by_type = last_modified_by_type
        self.last_modified_at = last_modified_at
class SystemService(Model):
    """A system service running on a compute.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar system_service_type: The type of this system service.
    :vartype system_service_type: str
    :ivar public_ip_address: Public IP address
    :vartype public_ip_address: str
    :ivar version: The version for this type.
    :vartype version: str
    """

    _validation = {
        'system_service_type': {'readonly': True},
        'public_ip_address': {'readonly': True},
        'version': {'readonly': True},
    }

    _attribute_map = {
        'system_service_type': {'key': 'systemServiceType', 'type': 'str'},
        'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
    }

    def __init__(self, **kwargs) -> None:
        super(SystemService, self).__init__(**kwargs)
        # All fields are server-populated (read-only).
        self.system_service_type = None
        self.public_ip_address = None
        self.version = None


class TensorFlow(DistributionConfiguration):
    """TensorFlow distribution configuration.

    All required parameters must be populated in order to send to Azure.

    :param distribution_type: Required. Constant filled by server.
    :type distribution_type: str
    :param parameter_server_count: Number of parameter server tasks.
    :type parameter_server_count: int
    :param worker_count: Number of workers. Overwrites the node count in
     compute binding.
    :type worker_count: int
    """

    _validation = {
        'distribution_type': {'required': True},
    }

    _attribute_map = {
        'distribution_type': {'key': 'distributionType', 'type': 'str'},
        'parameter_server_count': {'key': 'parameterServerCount', 'type': 'int'},
        'worker_count': {'key': 'workerCount', 'type': 'int'},
    }

    def __init__(self, *, parameter_server_count: int=None, worker_count: int=None, **kwargs) -> None:
        super(TensorFlow, self).__init__(**kwargs)
        self.parameter_server_count = parameter_server_count
        self.worker_count = worker_count
        # Polymorphic discriminator: fixed by the service contract.
        self.distribution_type = 'TensorFlow'
class TrialComponent(Model):
    """Trial component definition.

    All required parameters must be populated in order to send to Azure.

    :param code_id: ARM resource ID of the code asset.
    :type code_id: str
    :param command: Required. The command to execute on startup of the job.
     eg. "python train.py"
    :type command: str
    :param distribution: Distribution configuration of the job. If set, this
     should be one of Mpi, Tensorflow, PyTorch, or null.
    :type distribution:
     ~azure.mgmt.machinelearningservices.models.DistributionConfiguration
    :param environment_id: The ARM resource ID of the Environment
     specification for the job.
    :type environment_id: str
    :param environment_variables: Environment variables included in the job.
    :type environment_variables: dict[str, str]
    :param input_data_bindings: Mapping of input data bindings used in the
     job.
    :type input_data_bindings: dict[str,
     ~azure.mgmt.machinelearningservices.models.InputDataBinding]
    :param output_data_bindings: Mapping of output data bindings used in the
     job.
    :type output_data_bindings: dict[str,
     ~azure.mgmt.machinelearningservices.models.OutputDataBinding]
    :param timeout: The max run duration in ISO 8601 format, after which the
     trial component will be cancelled.
     Only supports duration with precision as low as Seconds.
    :type timeout: timedelta
    """

    _validation = {
        'command': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'},
    }

    _attribute_map = {
        'code_id': {'key': 'codeId', 'type': 'str'},
        'command': {'key': 'command', 'type': 'str'},
        'distribution': {'key': 'distribution', 'type': 'DistributionConfiguration'},
        'environment_id': {'key': 'environmentId', 'type': 'str'},
        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
        'input_data_bindings': {'key': 'inputDataBindings', 'type': '{InputDataBinding}'},
        'output_data_bindings': {'key': 'outputDataBindings', 'type': '{OutputDataBinding}'},
        'timeout': {'key': 'timeout', 'type': 'duration'},
    }

    def __init__(self, *, command: str, code_id: str=None, distribution=None, environment_id: str=None, environment_variables=None, input_data_bindings=None, output_data_bindings=None, timeout=None, **kwargs) -> None:
        super(TrialComponent, self).__init__(**kwargs)
        self.code_id = code_id
        self.command = command
        self.distribution = distribution
        self.environment_id = environment_id
        self.environment_variables = environment_variables
        self.input_data_bindings = input_data_bindings
        self.output_data_bindings = output_data_bindings
        self.timeout = timeout


class TruncationSelectionPolicy(EarlyTerminationPolicy):
    """Defines an early termination policy that cancels a given percentage of
    runs at each evaluation interval.

    All required parameters must be populated in order to send to Azure.

    :param delay_evaluation: Number of intervals by which to delay the first
     evaluation.
    :type delay_evaluation: int
    :param evaluation_interval: Interval (number of runs) between policy
     evaluations.
    :type evaluation_interval: int
    :param policy_type: Required. Constant filled by server.
    :type policy_type: str
    :param truncation_percentage: The percentage of runs to cancel at each
     evaluation interval.
    :type truncation_percentage: int
    """

    _validation = {
        'policy_type': {'required': True},
    }

    _attribute_map = {
        'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'},
        'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'},
        'policy_type': {'key': 'policyType', 'type': 'str'},
        'truncation_percentage': {'key': 'truncationPercentage', 'type': 'int'},
    }

    def __init__(self, *, delay_evaluation: int=None, evaluation_interval: int=None, truncation_percentage: int=None, **kwargs) -> None:
        super(TruncationSelectionPolicy, self).__init__(delay_evaluation=delay_evaluation, evaluation_interval=evaluation_interval, **kwargs)
        self.truncation_percentage = truncation_percentage
        # Polymorphic discriminator: fixed by the service contract.
        self.policy_type = 'TruncationSelection'
class UpdateWorkspaceQuotas(Model):
    """The properties for update Quota response.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Specifies the resource ID.
    :vartype id: str
    :ivar type: Specifies the resource type.
    :vartype type: str
    :param limit: Limit. The maximum permitted quota of the resource.
    :type limit: long
    :ivar unit: An enum describing the unit of quota measurement. Possible
     values include: 'Count'
    :vartype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit
    :param status: Update Workspace Quota Status. Status of update workspace
     quota. Possible values include: 'Undefined', 'Success', 'Failure',
     'InvalidQuotaBelowClusterMinimum', 'InvalidQuotaExceedsSubscriptionLimit',
     'InvalidVMFamilyName', 'OperationNotSupportedForSku',
     'OperationNotEnabledForRegion'
    :type status: str or ~azure.mgmt.machinelearningservices.models.Status
    """

    _validation = {
        'id': {'readonly': True},
        'type': {'readonly': True},
        'unit': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'limit': {'key': 'limit', 'type': 'long'},
        'unit': {'key': 'unit', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
    }

    def __init__(self, *, limit: int=None, status=None, **kwargs) -> None:
        super(UpdateWorkspaceQuotas, self).__init__(**kwargs)
        self.id = None  # server-populated
        self.type = None  # server-populated
        self.limit = limit
        self.unit = None  # server-populated
        self.status = status
+ :vartype value: + list[~azure.mgmt.machinelearningservices.models.UpdateWorkspaceQuotas] + :ivar next_link: The URI to fetch the next page of workspace quota update + result. Call ListNext() with this to fetch the next page of Workspace + Quota update result. + :vartype next_link: str """ _validation = { - 'system_service_type': {'readonly': True}, - 'public_ip_address': {'readonly': True}, - 'version': {'readonly': True}, + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, } _attribute_map = { - 'system_service_type': {'key': 'systemServiceType', 'type': 'str'}, - 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[UpdateWorkspaceQuotas]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__(self, **kwargs) -> None: - super(SystemService, self).__init__(**kwargs) - self.system_service_type = None - self.public_ip_address = None - self.version = None + super(UpdateWorkspaceQuotasResult, self).__init__(**kwargs) + self.value = None + self.next_link = None class Usage(Model): @@ -1515,6 +8508,8 @@ class Usage(Model): :ivar id: Specifies the resource ID. :vartype id: str + :ivar aml_workspace_location: Region of the AML workspace in the id. + :vartype aml_workspace_location: str :ivar type: Specifies the resource type. :vartype type: str :ivar unit: An enum describing the unit of usage measurement. 
Possible @@ -1530,6 +8525,7 @@ class Usage(Model): _validation = { 'id': {'readonly': True}, + 'aml_workspace_location': {'readonly': True}, 'type': {'readonly': True}, 'unit': {'readonly': True}, 'current_value': {'readonly': True}, @@ -1539,6 +8535,7 @@ class Usage(Model): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, + 'aml_workspace_location': {'key': 'amlWorkspaceLocation', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'unit': {'key': 'unit', 'type': 'str'}, 'current_value': {'key': 'currentValue', 'type': 'long'}, @@ -1549,6 +8546,7 @@ class Usage(Model): def __init__(self, **kwargs) -> None: super(Usage, self).__init__(**kwargs) self.id = None + self.aml_workspace_location = None self.type = None self.unit = None self.current_value = None @@ -1618,6 +8616,63 @@ def __init__(self, *, admin_user_name: str, admin_user_ssh_public_key: str=None, self.admin_user_password = admin_user_password +class UserAssignedIdentity(Model): + """User Assigned Identity. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar principal_id: The principal ID of the user assigned identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of the user assigned identity. + :vartype tenant_id: str + :ivar client_id: The clientId(aka appId) of the user assigned identity. + :vartype client_id: str + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + 'client_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(UserAssignedIdentity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + self.client_id = None + + +class UserAssignedIdentityMeta(Model): + """User assigned identities associated with a resource. 
+ + :param client_id: Aka application ID, a unique identifier generated by + Azure AD that is tied to an application and service principal during its + initial provisioning. + :type client_id: str + :param principal_id: The object ID of the service principal object for + your managed identity that is used to grant role-based access to an Azure + resource. + :type principal_id: str + """ + + _attribute_map = { + 'client_id': {'key': 'clientId', 'type': 'str'}, + 'principal_id': {'key': 'principalId', 'type': 'str'}, + } + + def __init__(self, *, client_id: str=None, principal_id: str=None, **kwargs) -> None: + super(UserAssignedIdentityMeta, self).__init__(**kwargs) + self.client_id = client_id + self.principal_id = principal_id + + class VirtualMachine(Compute): """A Machine Learning compute based on Azure Virtual Machines. @@ -1636,19 +8691,22 @@ class VirtualMachine(Compute): ~azure.mgmt.machinelearningservices.models.ProvisioningState :param description: The description of the Machine Learning compute. :type description: str - :ivar created_on: The date and time when the compute was created. + :ivar created_on: The time at which the compute was created. :vartype created_on: datetime - :ivar modified_on: The date and time when the compute was last modified. + :ivar modified_on: The time at which the compute was last modified. :vartype modified_on: datetime :param resource_id: ARM resource id of the underlying compute :type resource_id: str :ivar provisioning_errors: Errors during provisioning :vartype provisioning_errors: - list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError] + list[~azure.mgmt.machinelearningservices.models.ErrorResponse] :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought from outside if true, or machine learning service provisioned it if false. 
:vartype is_attached_compute: bool + :param disable_local_auth: Opt-out of local authentication and ensure + customers can use only MSI and AAD exclusively for authentication. + :type disable_local_auth: bool :param compute_type: Required. Constant filled by server. :type compute_type: str :param properties: @@ -1672,18 +8730,41 @@ class VirtualMachine(Compute): 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, 'compute_type': {'key': 'computeType', 'type': 'str'}, 'properties': {'key': 'properties', 'type': 'VirtualMachineProperties'}, } - def __init__(self, *, compute_location: str=None, description: str=None, resource_id: str=None, properties=None, **kwargs) -> None: - super(VirtualMachine, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) + def __init__(self, *, compute_location: str=None, description: str=None, resource_id: str=None, disable_local_auth: bool=None, properties=None, **kwargs) -> None: + super(VirtualMachine, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs) self.properties = properties self.compute_type = 'VirtualMachine' +class VirtualMachineImage(Model): + """Virtual Machine image for Windows AML Compute. + + All required parameters must be populated in order to send to Azure. + + :param id: Required. 
Virtual Machine image path + :type id: str + """ + + _validation = { + 'id': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__(self, *, id: str, **kwargs) -> None: + super(VirtualMachineImage, self).__init__(**kwargs) + self.id = id + + class VirtualMachineProperties(Model): """VirtualMachineProperties. @@ -1696,6 +8777,9 @@ class VirtualMachineProperties(Model): :param administrator_account: Admin credentials for virtual machine :type administrator_account: ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials + :param is_notebook_instance_compute: Indicates whether this compute will + be used for running notebooks. + :type is_notebook_instance_compute: bool """ _attribute_map = { @@ -1703,14 +8787,16 @@ class VirtualMachineProperties(Model): 'ssh_port': {'key': 'sshPort', 'type': 'int'}, 'address': {'key': 'address', 'type': 'str'}, 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, + 'is_notebook_instance_compute': {'key': 'isNotebookInstanceCompute', 'type': 'bool'}, } - def __init__(self, *, virtual_machine_size: str=None, ssh_port: int=None, address: str=None, administrator_account=None, **kwargs) -> None: + def __init__(self, *, virtual_machine_size: str=None, ssh_port: int=None, address: str=None, administrator_account=None, is_notebook_instance_compute: bool=None, **kwargs) -> None: super(VirtualMachineProperties, self).__init__(**kwargs) self.virtual_machine_size = virtual_machine_size self.ssh_port = ssh_port self.address = address self.administrator_account = administrator_account + self.is_notebook_instance_compute = is_notebook_instance_compute class VirtualMachineSecrets(ComputeSecrets): @@ -1755,6 +8841,9 @@ class VirtualMachineSize(Model): :ivar v_cp_us: Number of vPUs. The number of vCPUs supported by the virtual machine size. :vartype v_cp_us: int + :ivar gpus: Number of gPUs. 
The number of gPUs supported by the virtual + machine size. + :vartype gpus: int :ivar os_vhd_size_mb: OS VHD Disk size. The OS VHD disk size, in MB, allowed by the virtual machine size. :vartype os_vhd_size_mb: int @@ -1770,12 +8859,17 @@ class VirtualMachineSize(Model): :ivar premium_io: Premium IO supported. Specifies if the virtual machine size supports premium IO. :vartype premium_io: bool + :param estimated_vm_prices: Estimated VM prices. The estimated price + information for using a VM. + :type estimated_vm_prices: + ~azure.mgmt.machinelearningservices.models.EstimatedVMPrices """ _validation = { 'name': {'readonly': True}, 'family': {'readonly': True}, 'v_cp_us': {'readonly': True}, + 'gpus': {'readonly': True}, 'os_vhd_size_mb': {'readonly': True}, 'max_resource_volume_mb': {'readonly': True}, 'memory_gb': {'readonly': True}, @@ -1787,41 +8881,44 @@ class VirtualMachineSize(Model): 'name': {'key': 'name', 'type': 'str'}, 'family': {'key': 'family', 'type': 'str'}, 'v_cp_us': {'key': 'vCPUs', 'type': 'int'}, + 'gpus': {'key': 'gpus', 'type': 'int'}, 'os_vhd_size_mb': {'key': 'osVhdSizeMB', 'type': 'int'}, 'max_resource_volume_mb': {'key': 'maxResourceVolumeMB', 'type': 'int'}, 'memory_gb': {'key': 'memoryGB', 'type': 'float'}, 'low_priority_capable': {'key': 'lowPriorityCapable', 'type': 'bool'}, 'premium_io': {'key': 'premiumIO', 'type': 'bool'}, + 'estimated_vm_prices': {'key': 'estimatedVMPrices', 'type': 'EstimatedVMPrices'}, } - def __init__(self, **kwargs) -> None: + def __init__(self, *, estimated_vm_prices=None, **kwargs) -> None: super(VirtualMachineSize, self).__init__(**kwargs) self.name = None self.family = None self.v_cp_us = None + self.gpus = None self.os_vhd_size_mb = None self.max_resource_volume_mb = None self.memory_gb = None self.low_priority_capable = None self.premium_io = None + self.estimated_vm_prices = estimated_vm_prices class VirtualMachineSizeListResult(Model): """The List Virtual Machine size operation response. 
- :param aml_compute: The list of virtual machine sizes supported by - AmlCompute. - :type aml_compute: + :param value: The list of virtual machine sizes supported by AmlCompute. + :type value: list[~azure.mgmt.machinelearningservices.models.VirtualMachineSize] """ _attribute_map = { - 'aml_compute': {'key': 'amlCompute', 'type': '[VirtualMachineSize]'}, + 'value': {'key': 'value', 'type': '[VirtualMachineSize]'}, } - def __init__(self, *, aml_compute=None, **kwargs) -> None: + def __init__(self, *, value=None, **kwargs) -> None: super(VirtualMachineSizeListResult, self).__init__(**kwargs) - self.aml_compute = aml_compute + self.value = value class VirtualMachineSshCredentials(Model): @@ -1858,18 +8955,14 @@ class Workspace(Resource): Variables are only populated by the server, and will be ignored when sending a request. - :ivar id: Specifies the resource ID. + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName} :vartype id: str - :ivar name: Specifies the name of the resource. + :ivar name: The name of the resource :vartype name: str - :ivar identity: The identity of the resource. - :vartype identity: ~azure.mgmt.machinelearningservices.models.Identity - :param location: Specifies the location of the resource. - :type location: str - :ivar type: Specifies the type of the resource. + :ivar type: The type of the resource. E.g. + "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts" :vartype type: str - :param tags: Contains resource tags defined as key/value pairs. - :type tags: dict[str, str] :ivar workspace_id: The immutable id associated with this workspace. :vartype workspace_id: str :param description: The description of this workspace. @@ -1877,9 +8970,6 @@ class Workspace(Resource): :param friendly_name: The friendly name for this workspace. 
This name in mutable :type friendly_name: str - :ivar creation_time: The creation time of the machine learning workspace - in ISO8601 format. - :vartype creation_time: datetime :param key_vault: ARM id of the key vault associated with this workspace. This cannot be changed once the workspace has been created :type key_vault: str @@ -1903,49 +8993,243 @@ class Workspace(Resource): 'Deleting', 'Succeeded', 'Failed', 'Canceled' :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.ProvisioningState + :param encryption: The encryption settings of Azure ML workspace. + :type encryption: + ~azure.mgmt.machinelearningservices.models.EncryptionProperty + :param hbi_workspace: The flag to signal HBI data in the workspace and + reduce diagnostic data collected by the service. Default value: False . + :type hbi_workspace: bool + :ivar service_provisioned_resource_group: The name of the managed resource + group created by workspace RP in customer subscription if the workspace is + CMK workspace + :vartype service_provisioned_resource_group: str + :ivar private_link_count: Count of private connections in the workspace + :vartype private_link_count: int + :param image_build_compute: The compute name for image build + :type image_build_compute: str + :param allow_public_access_when_behind_vnet: The flag to indicate whether + to allow public access when behind VNet. Default value: False . + :type allow_public_access_when_behind_vnet: bool + :ivar private_endpoint_connections: The list of private endpoint + connections in the workspace. + :vartype private_endpoint_connections: + list[~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection] + :param shared_private_link_resources: The list of shared private link + resources in this workspace. + :type shared_private_link_resources: + list[~azure.mgmt.machinelearningservices.models.SharedPrivateLinkResource] + :ivar notebook_info: The notebook info of Azure ML workspace. 
+ :vartype notebook_info: + ~azure.mgmt.machinelearningservices.models.NotebookResourceInfo + :param service_managed_resources_settings: The service managed resource + settings. + :type service_managed_resources_settings: + ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings + :param primary_user_assigned_identity: The user assigned identity resource + id that represents the workspace identity. + :type primary_user_assigned_identity: str + :ivar tenant_id: The tenant id associated with this workspace. + :vartype tenant_id: str + :param identity: The identity of the resource. + :type identity: ~azure.mgmt.machinelearningservices.models.Identity + :param location: Specifies the location of the resource. + :type location: str + :param tags: Contains resource tags defined as key/value pairs. + :type tags: dict[str, str] + :param sku: The sku of the workspace. + :type sku: ~azure.mgmt.machinelearningservices.models.Sku + :param system_data: + :type system_data: ~azure.mgmt.machinelearningservices.models.SystemData """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, - 'identity': {'readonly': True}, 'type': {'readonly': True}, 'workspace_id': {'readonly': True}, - 'creation_time': {'readonly': True}, 'provisioning_state': {'readonly': True}, + 'service_provisioned_resource_group': {'readonly': True}, + 'private_link_count': {'readonly': True}, + 'private_endpoint_connections': {'readonly': True}, + 'notebook_info': {'readonly': True}, + 'tenant_id': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'Identity'}, - 'location': {'key': 'location', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, 'workspace_id': {'key': 'properties.workspaceId', 'type': 'str'}, 'description': {'key': 'properties.description', 'type': 'str'}, 'friendly_name': {'key': 
'properties.friendlyName', 'type': 'str'}, - 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, 'key_vault': {'key': 'properties.keyVault', 'type': 'str'}, 'application_insights': {'key': 'properties.applicationInsights', 'type': 'str'}, 'container_registry': {'key': 'properties.containerRegistry', 'type': 'str'}, 'storage_account': {'key': 'properties.storageAccount', 'type': 'str'}, 'discovery_url': {'key': 'properties.discoveryUrl', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionProperty'}, + 'hbi_workspace': {'key': 'properties.hbiWorkspace', 'type': 'bool'}, + 'service_provisioned_resource_group': {'key': 'properties.serviceProvisionedResourceGroup', 'type': 'str'}, + 'private_link_count': {'key': 'properties.privateLinkCount', 'type': 'int'}, + 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'}, + 'allow_public_access_when_behind_vnet': {'key': 'properties.allowPublicAccessWhenBehindVnet', 'type': 'bool'}, + 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'}, + 'shared_private_link_resources': {'key': 'properties.sharedPrivateLinkResources', 'type': '[SharedPrivateLinkResource]'}, + 'notebook_info': {'key': 'properties.notebookInfo', 'type': 'NotebookResourceInfo'}, + 'service_managed_resources_settings': {'key': 'properties.serviceManagedResourcesSettings', 'type': 'ServiceManagedResourcesSettings'}, + 'primary_user_assigned_identity': {'key': 'properties.primaryUserAssignedIdentity', 'type': 'str'}, + 'tenant_id': {'key': 'properties.tenantId', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, } - def __init__(self, 
*, location: str=None, tags=None, description: str=None, friendly_name: str=None, key_vault: str=None, application_insights: str=None, container_registry: str=None, storage_account: str=None, discovery_url: str=None, **kwargs) -> None: - super(Workspace, self).__init__(location=location, tags=tags, **kwargs) + def __init__(self, *, description: str=None, friendly_name: str=None, key_vault: str=None, application_insights: str=None, container_registry: str=None, storage_account: str=None, discovery_url: str=None, encryption=None, hbi_workspace: bool=False, image_build_compute: str=None, allow_public_access_when_behind_vnet: bool=False, shared_private_link_resources=None, service_managed_resources_settings=None, primary_user_assigned_identity: str=None, identity=None, location: str=None, tags=None, sku=None, system_data=None, **kwargs) -> None: + super(Workspace, self).__init__(**kwargs) self.workspace_id = None self.description = description self.friendly_name = friendly_name - self.creation_time = None self.key_vault = key_vault self.application_insights = application_insights self.container_registry = container_registry self.storage_account = storage_account self.discovery_url = discovery_url self.provisioning_state = None + self.encryption = encryption + self.hbi_workspace = hbi_workspace + self.service_provisioned_resource_group = None + self.private_link_count = None + self.image_build_compute = image_build_compute + self.allow_public_access_when_behind_vnet = allow_public_access_when_behind_vnet + self.private_endpoint_connections = None + self.shared_private_link_resources = shared_private_link_resources + self.notebook_info = None + self.service_managed_resources_settings = service_managed_resources_settings + self.primary_user_assigned_identity = primary_user_assigned_identity + self.tenant_id = None + self.identity = identity + self.location = location + self.tags = tags + self.sku = sku + self.system_data = system_data + + +class 
WorkspaceConnection(Model): + """Workspace connection. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: ResourceId of the workspace connection. + :vartype id: str + :ivar name: Friendly name of the workspace connection. + :vartype name: str + :ivar type: Resource type of workspace connection. + :vartype type: str + :param category: Category of the workspace connection. + :type category: str + :param target: Target of the workspace connection. + :type target: str + :param auth_type: Authorization type of the workspace connection. + :type auth_type: str + :param value: Value details of the workspace connection. + :type value: str + :param value_format: format for the workspace connection value. Possible + values include: 'JSON' + :type value_format: str or + ~azure.mgmt.machinelearningservices.models.ValueFormat + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'category': {'key': 'properties.category', 'type': 'str'}, + 'target': {'key': 'properties.target', 'type': 'str'}, + 'auth_type': {'key': 'properties.authType', 'type': 'str'}, + 'value': {'key': 'properties.value', 'type': 'str'}, + 'value_format': {'key': 'properties.valueFormat', 'type': 'str'}, + } + + def __init__(self, *, category: str=None, target: str=None, auth_type: str=None, value: str=None, value_format=None, **kwargs) -> None: + super(WorkspaceConnection, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.category = category + self.target = target + self.auth_type = auth_type + self.value = value + self.value_format = value_format + + +class WorkspaceSku(Model): + """Describes Workspace Sku details and features. 
+ + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar locations: The set of locations that the SKU is available. This will + be supported and registered Azure Geo Regions (e.g. West US, East US, + Southeast Asia, etc.). + :vartype locations: list[str] + :ivar location_info: A list of locations and availability zones in those + locations where the SKU is available. + :vartype location_info: + list[~azure.mgmt.machinelearningservices.models.ResourceSkuLocationInfo] + :ivar tier: Sku Tier like Basic or Enterprise + :vartype tier: str + :ivar resource_type: + :vartype resource_type: str + :ivar name: + :vartype name: str + :ivar capabilities: List of features/user capabilities associated with the + sku + :vartype capabilities: + list[~azure.mgmt.machinelearningservices.models.SKUCapability] + :param restrictions: The restrictions because of which SKU cannot be used. + This is empty if there are no restrictions. + :type restrictions: + list[~azure.mgmt.machinelearningservices.models.Restriction] + """ + + _validation = { + 'locations': {'readonly': True}, + 'location_info': {'readonly': True}, + 'tier': {'readonly': True}, + 'resource_type': {'readonly': True}, + 'name': {'readonly': True}, + 'capabilities': {'readonly': True}, + } + + _attribute_map = { + 'locations': {'key': 'locations', 'type': '[str]'}, + 'location_info': {'key': 'locationInfo', 'type': '[ResourceSkuLocationInfo]'}, + 'tier': {'key': 'tier', 'type': 'str'}, + 'resource_type': {'key': 'resourceType', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'capabilities': {'key': 'capabilities', 'type': '[SKUCapability]'}, + 'restrictions': {'key': 'restrictions', 'type': '[Restriction]'}, + } + + def __init__(self, *, restrictions=None, **kwargs) -> None: + super(WorkspaceSku, self).__init__(**kwargs) + self.locations = None + self.location_info = None + self.tier = None + self.resource_type = None + self.name = None + self.capabilities = None + 
self.restrictions = restrictions class WorkspaceUpdateParameters(Model): @@ -1953,20 +9237,43 @@ class WorkspaceUpdateParameters(Model): :param tags: The resource tags for the machine learning workspace. :type tags: dict[str, str] + :param sku: The sku of the workspace. + :type sku: ~azure.mgmt.machinelearningservices.models.Sku + :param identity: The identity of the resource. + :type identity: ~azure.mgmt.machinelearningservices.models.Identity :param description: The description of this workspace. :type description: str :param friendly_name: The friendly name for this workspace. :type friendly_name: str + :param image_build_compute: The compute name for image build + :type image_build_compute: str + :param service_managed_resources_settings: The service managed resource + settings. + :type service_managed_resources_settings: + ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings + :param primary_user_assigned_identity: The user assigned identity resource + id that represents the workspace identity. 
+ :type primary_user_assigned_identity: str """ _attribute_map = { 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, 'description': {'key': 'properties.description', 'type': 'str'}, 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'}, + 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'}, + 'service_managed_resources_settings': {'key': 'properties.serviceManagedResourcesSettings', 'type': 'ServiceManagedResourcesSettings'}, + 'primary_user_assigned_identity': {'key': 'properties.primaryUserAssignedIdentity', 'type': 'str'}, } - def __init__(self, *, tags=None, description: str=None, friendly_name: str=None, **kwargs) -> None: + def __init__(self, *, tags=None, sku=None, identity=None, description: str=None, friendly_name: str=None, image_build_compute: str=None, service_managed_resources_settings=None, primary_user_assigned_identity: str=None, **kwargs) -> None: super(WorkspaceUpdateParameters, self).__init__(**kwargs) self.tags = tags + self.sku = sku + self.identity = identity self.description = description self.friendly_name = friendly_name + self.image_build_compute = image_build_compute + self.service_managed_resources_settings = service_managed_resources_settings + self.primary_user_assigned_identity = primary_user_assigned_identity diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_paged_models.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_paged_models.py index 9bc3567bbf90..00d53f7872e8 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_paged_models.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_paged_models.py @@ -51,6 +51,19 @@ class UsagePaged(Paged): def __init__(self, *args, **kwargs): 
super(UsagePaged, self).__init__(*args, **kwargs) +class ResourceQuotaPaged(Paged): + """ + A paging container for iterating over a list of :class:`ResourceQuota ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[ResourceQuota]'} + } + + def __init__(self, *args, **kwargs): + + super(ResourceQuotaPaged, self).__init__(*args, **kwargs) class ComputeResourcePaged(Paged): """ A paging container for iterating over a list of :class:`ComputeResource ` object @@ -64,3 +77,263 @@ class ComputeResourcePaged(Paged): def __init__(self, *args, **kwargs): super(ComputeResourcePaged, self).__init__(*args, **kwargs) +class AmlComputeNodeInformationPaged(Paged): + """ + A paging container for iterating over a list of :class:`AmlComputeNodeInformation ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'nodes', 'type': '[AmlComputeNodeInformation]'} + } + + def __init__(self, *args, **kwargs): + + super(AmlComputeNodeInformationPaged, self).__init__(*args, **kwargs) +class PrivateEndpointConnectionPaged(Paged): + """ + A paging container for iterating over a list of :class:`PrivateEndpointConnection ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[PrivateEndpointConnection]'} + } + + def __init__(self, *args, **kwargs): + + super(PrivateEndpointConnectionPaged, self).__init__(*args, **kwargs) +class WorkspaceConnectionPaged(Paged): + """ + A paging container for iterating over a list of :class:`WorkspaceConnection ` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[WorkspaceConnection]'} + } + + def __init__(self, *args, **kwargs): + + super(WorkspaceConnectionPaged, self).__init__(*args, **kwargs) +class BatchEndpointTrackedResourcePaged(Paged): + """ + A paging container 
class BatchEndpointTrackedResourcePaged(Paged):
    """
    A paging container for iterating over a list of :class:`BatchEndpointTrackedResource <azure.mgmt.machinelearningservices.models.BatchEndpointTrackedResource>` objects.
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'current_page': {'key': 'value', 'type': '[BatchEndpointTrackedResource]'}
    }

    def __init__(self, *args, **kwargs):

        super(BatchEndpointTrackedResourcePaged, self).__init__(*args, **kwargs)


class BatchDeploymentTrackedResourcePaged(Paged):
    """
    A paging container for iterating over a list of :class:`BatchDeploymentTrackedResource <azure.mgmt.machinelearningservices.models.BatchDeploymentTrackedResource>` objects.
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'current_page': {'key': 'value', 'type': '[BatchDeploymentTrackedResource]'}
    }

    def __init__(self, *args, **kwargs):

        super(BatchDeploymentTrackedResourcePaged, self).__init__(*args, **kwargs)


class CodeContainerResourcePaged(Paged):
    """
    A paging container for iterating over a list of :class:`CodeContainerResource <azure.mgmt.machinelearningservices.models.CodeContainerResource>` objects.
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'current_page': {'key': 'value', 'type': '[CodeContainerResource]'}
    }

    def __init__(self, *args, **kwargs):

        super(CodeContainerResourcePaged, self).__init__(*args, **kwargs)


class CodeVersionResourcePaged(Paged):
    """
    A paging container for iterating over a list of :class:`CodeVersionResource <azure.mgmt.machinelearningservices.models.CodeVersionResource>` objects.
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'current_page': {'key': 'value', 'type': '[CodeVersionResource]'}
    }

    def __init__(self, *args, **kwargs):

        super(CodeVersionResourcePaged, self).__init__(*args, **kwargs)


class DataContainerResourcePaged(Paged):
    """
    A paging container for iterating over a list of :class:`DataContainerResource <azure.mgmt.machinelearningservices.models.DataContainerResource>` objects.
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'current_page': {'key': 'value', 'type': '[DataContainerResource]'}
    }

    def __init__(self, *args, **kwargs):

        super(DataContainerResourcePaged, self).__init__(*args, **kwargs)


class DataVersionResourcePaged(Paged):
    """
    A paging container for iterating over a list of :class:`DataVersionResource <azure.mgmt.machinelearningservices.models.DataVersionResource>` objects.
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'current_page': {'key': 'value', 'type': '[DataVersionResource]'}
    }

    def __init__(self, *args, **kwargs):

        super(DataVersionResourcePaged, self).__init__(*args, **kwargs)


class DatastorePropertiesResourcePaged(Paged):
    """
    A paging container for iterating over a list of :class:`DatastorePropertiesResource <azure.mgmt.machinelearningservices.models.DatastorePropertiesResource>` objects.
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'current_page': {'key': 'value', 'type': '[DatastorePropertiesResource]'}
    }

    def __init__(self, *args, **kwargs):

        super(DatastorePropertiesResourcePaged, self).__init__(*args, **kwargs)


class EnvironmentContainerResourcePaged(Paged):
    """
    A paging container for iterating over a list of :class:`EnvironmentContainerResource <azure.mgmt.machinelearningservices.models.EnvironmentContainerResource>` objects.
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'current_page': {'key': 'value', 'type': '[EnvironmentContainerResource]'}
    }

    def __init__(self, *args, **kwargs):

        super(EnvironmentContainerResourcePaged, self).__init__(*args, **kwargs)


class EnvironmentSpecificationVersionResourcePaged(Paged):
    """
    A paging container for iterating over a list of :class:`EnvironmentSpecificationVersionResource <azure.mgmt.machinelearningservices.models.EnvironmentSpecificationVersionResource>` objects.
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'current_page': {'key': 'value', 'type': '[EnvironmentSpecificationVersionResource]'}
    }

    def __init__(self, *args, **kwargs):

        super(EnvironmentSpecificationVersionResourcePaged, self).__init__(*args, **kwargs)


class JobBaseResourcePaged(Paged):
    """
    A paging container for iterating over a list of :class:`JobBaseResource <azure.mgmt.machinelearningservices.models.JobBaseResource>` objects.
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'current_page': {'key': 'value', 'type': '[JobBaseResource]'}
    }

    def __init__(self, *args, **kwargs):

        super(JobBaseResourcePaged, self).__init__(*args, **kwargs)


class LabelingJobResourcePaged(Paged):
    """
    A paging container for iterating over a list of :class:`LabelingJobResource <azure.mgmt.machinelearningservices.models.LabelingJobResource>` objects.
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'current_page': {'key': 'value', 'type': '[LabelingJobResource]'}
    }

    def __init__(self, *args, **kwargs):

        super(LabelingJobResourcePaged, self).__init__(*args, **kwargs)


class ModelContainerResourcePaged(Paged):
    """
    A paging container for iterating over a list of :class:`ModelContainerResource <azure.mgmt.machinelearningservices.models.ModelContainerResource>` objects.
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'current_page': {'key': 'value', 'type': '[ModelContainerResource]'}
    }

    def __init__(self, *args, **kwargs):

        super(ModelContainerResourcePaged, self).__init__(*args, **kwargs)


class ModelVersionResourcePaged(Paged):
    """
    A paging container for iterating over a list of :class:`ModelVersionResource <azure.mgmt.machinelearningservices.models.ModelVersionResource>` objects.
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'current_page': {'key': 'value', 'type': '[ModelVersionResource]'}
    }

    def __init__(self, *args, **kwargs):

        super(ModelVersionResourcePaged, self).__init__(*args, **kwargs)


class OnlineEndpointTrackedResourcePaged(Paged):
    """
    A paging container for iterating over a list of :class:`OnlineEndpointTrackedResource <azure.mgmt.machinelearningservices.models.OnlineEndpointTrackedResource>` objects.
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'current_page': {'key': 'value', 'type': '[OnlineEndpointTrackedResource]'}
    }

    def __init__(self, *args, **kwargs):

        super(OnlineEndpointTrackedResourcePaged, self).__init__(*args, **kwargs)


class OnlineDeploymentTrackedResourcePaged(Paged):
    """
    A paging container for iterating over a list of :class:`OnlineDeploymentTrackedResource <azure.mgmt.machinelearningservices.models.OnlineDeploymentTrackedResource>` objects.
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'current_page': {'key': 'value', 'type': '[OnlineDeploymentTrackedResource]'}
    }

    def __init__(self, *args, **kwargs):

        super(OnlineDeploymentTrackedResourcePaged, self).__init__(*args, **kwargs)


class AmlUserFeaturePaged(Paged):
    """
    A paging container for iterating over a list of :class:`AmlUserFeature <azure.mgmt.machinelearningservices.models.AmlUserFeature>` objects.
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'current_page': {'key': 'value', 'type': '[AmlUserFeature]'}
    }

    def __init__(self, *args, **kwargs):

        super(AmlUserFeaturePaged, self).__init__(*args, **kwargs)


class WorkspaceSkuPaged(Paged):
    """
    A paging container for iterating over a list of :class:`WorkspaceSku <azure.mgmt.machinelearningservices.models.WorkspaceSku>` objects.
    """

    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'current_page': {'key': 'value', 'type': '[WorkspaceSku]'}
    }

    def __init__(self, *args, **kwargs):

        super(WorkspaceSkuPaged, self).__init__(*args, **kwargs)
+from ._workspace_connections_operations import WorkspaceConnectionsOperations +from ._batch_endpoints_operations import BatchEndpointsOperations +from ._batch_deployments_operations import BatchDeploymentsOperations +from ._code_containers_operations import CodeContainersOperations +from ._code_versions_operations import CodeVersionsOperations +from ._data_containers_operations import DataContainersOperations +from ._data_versions_operations import DataVersionsOperations +from ._datastores_operations import DatastoresOperations +from ._environment_containers_operations import EnvironmentContainersOperations +from ._environment_specification_versions_operations import EnvironmentSpecificationVersionsOperations +from ._jobs_operations import JobsOperations +from ._labeling_jobs_operations import LabelingJobsOperations +from ._model_containers_operations import ModelContainersOperations +from ._model_versions_operations import ModelVersionsOperations +from ._online_endpoints_operations import OnlineEndpointsOperations +from ._online_deployments_operations import OnlineDeploymentsOperations +from ._workspace_features_operations import WorkspaceFeaturesOperations +from ._workspace_skus_operations import WorkspaceSkusOperations __all__ = [ 'Operations', 'WorkspacesOperations', 'UsagesOperations', 'VirtualMachineSizesOperations', - 'MachineLearningComputeOperations', + 'QuotasOperations', + 'ComputeOperations', + 'PrivateEndpointConnectionsOperations', + 'PrivateLinkResourcesOperations', + 'WorkspaceConnectionsOperations', + 'BatchEndpointsOperations', + 'BatchDeploymentsOperations', + 'CodeContainersOperations', + 'CodeVersionsOperations', + 'DataContainersOperations', + 'DataVersionsOperations', + 'DatastoresOperations', + 'EnvironmentContainersOperations', + 'EnvironmentSpecificationVersionsOperations', + 'JobsOperations', + 'LabelingJobsOperations', + 'ModelContainersOperations', + 'ModelVersionsOperations', + 'OnlineEndpointsOperations', + 
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

import uuid
from msrest.pipeline import ClientRawResponse

from .. import models


class BatchDeploymentsOperations(object):
    """BatchDeploymentsOperations operations.

    You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute.

    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    :ivar api_version: The API version to use for this operation. Constant value: "2021-03-01-preview".
    """

    models = models

    def __init__(self, client, config, serializer, deserializer):

        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        # Pinned per-operation-group API version; every request below sends it
        # as the `api-version` query parameter.
        self.api_version = "2021-03-01-preview"

        self.config = config

    def list(
            self, endpoint_name, resource_group_name, workspace_name, order_by=None, top=None, skip=None, custom_headers=None, raw=False, **operation_config):
        """Lists Batch inference deployments in the workspace.

        :param endpoint_name: Endpoint name
        :type endpoint_name: str
        :param resource_group_name: The name of the resource group. The name
         is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param order_by: Ordering of list.
        :type order_by: str
        :param top: Top of list.
        :type top: int
        :param skip: Continuation token for pagination.
        :type skip: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: An iterator like instance of BatchDeploymentTrackedResource
        :rtype:
         ~azure.mgmt.machinelearningservices.models.BatchDeploymentTrackedResourcePaged[~azure.mgmt.machinelearningservices.models.BatchDeploymentTrackedResource]
        :raises:
         :class:`ErrorResponseException<azure.mgmt.machinelearningservices.models.ErrorResponseException>`
        """
        def prepare_request(next_link=None):
            # First page is built from the operation URL + query parameters;
            # subsequent pages reuse the service-provided nextLink verbatim.
            if not next_link:
                # Construct URL
                url = self.list.metadata['url']
                path_format_arguments = {
                    'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)

                # Construct parameters
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)
                if order_by is not None:
                    query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str')
                if top is not None:
                    query_parameters['$top'] = self._serialize.query("top", top, 'int')
                if skip is not None:
                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')

            else:
                url = next_link
                query_parameters = {}

            # Construct headers
            header_parameters = {}
            header_parameters['Accept'] = 'application/json'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

            # Construct and send request
            request = self._client.get(url, query_parameters, header_parameters)
            return request

        def internal_paging(next_link=None):
            request = prepare_request(next_link)

            response = self._client.send(request, stream=False, **operation_config)

            if response.status_code not in [200]:
                raise models.ErrorResponseException(self._deserialize, response)

            return response

        # Deserialize response: the Paged container drives internal_paging
        # lazily as the caller iterates.
        header_dict = None
        if raw:
            header_dict = {}
        deserialized = models.BatchDeploymentTrackedResourcePaged(internal_paging, self._deserialize.dependencies, header_dict)

        return deserialized
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments'}

    def delete(
            self, endpoint_name, deployment_name, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config):
        """Delete Batch Inference deployment.

        :param endpoint_name: Endpoint name
        :type endpoint_name: str
        :param deployment_name: Inference deployment identifier.
        :type deployment_name: str
        :param resource_group_name: The name of the resource group. The name
         is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: None or ClientRawResponse if raw=true
        :rtype: None or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`ErrorResponseException<azure.mgmt.machinelearningservices.models.ErrorResponseException>`
        """
        # Construct URL
        url = self.delete.metadata['url']
        path_format_arguments = {
            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)

        # Construct headers (no Accept header: 200/204 carry no body to parse)
        header_parameters = {}
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.delete(url, query_parameters, header_parameters)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200, 204]:
            raise models.ErrorResponseException(self._deserialize, response)

        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response
    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}'}

    def get(
            self, endpoint_name, deployment_name, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config):
        """Gets a batch inference deployment by id.

        :param endpoint_name: Endpoint name
        :type endpoint_name: str
        :param deployment_name: The identifier for the Batch deployments.
        :type deployment_name: str
        :param resource_group_name: The name of the resource group. The name
         is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: BatchDeploymentTrackedResource or ClientRawResponse if
         raw=true
        :rtype:
         ~azure.mgmt.machinelearningservices.models.BatchDeploymentTrackedResource
         or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`ErrorResponseException<azure.mgmt.machinelearningservices.models.ErrorResponseException>`
        """
        # Construct URL
        # NOTE(review): unlike update/create_or_update below, get/delete do not
        # apply the name regex pattern — mirrors the swagger; confirm intended.
        url = self.get.metadata['url']
        path_format_arguments = {
            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'),
            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters, header_parameters)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200]:
            raise models.ErrorResponseException(self._deserialize, response)

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('BatchDeploymentTrackedResource', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}'}

    def update(
            self, endpoint_name, deployment_name, resource_group_name, workspace_name, body, custom_headers=None, raw=False, **operation_config):
        """Update a batch inference deployment.

        Sends an HTTP PATCH with a partial resource body.

        :param endpoint_name: Inference endpoint name
        :type endpoint_name: str
        :param deployment_name: The identifier for the Batch inference
         deployment.
        :type deployment_name: str
        :param resource_group_name: The name of the resource group. The name
         is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param body: Batch inference deployment definition object.
        :type body:
         ~azure.mgmt.machinelearningservices.models.PartialBatchDeploymentPartialTrackedResource
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: BatchDeploymentTrackedResource or ClientRawResponse if
         raw=true
        :rtype:
         ~azure.mgmt.machinelearningservices.models.BatchDeploymentTrackedResource
         or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`ErrorResponseException<azure.mgmt.machinelearningservices.models.ErrorResponseException>`
        """
        # Construct URL
        url = self.update.metadata['url']
        path_format_arguments = {
            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct body
        body_content = self._serialize.body(body, 'PartialBatchDeploymentPartialTrackedResource')

        # Construct and send request
        request = self._client.patch(url, query_parameters, header_parameters, body_content)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200]:
            raise models.ErrorResponseException(self._deserialize, response)

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('BatchDeploymentTrackedResource', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
    update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}'}

    def create_or_update(
            self, endpoint_name, deployment_name, resource_group_name, workspace_name, body, custom_headers=None, raw=False, **operation_config):
        """Creates/updates a batch inference deployment.

        Sends an HTTP PUT with the full resource body; 200 = updated,
        201 = created (both deserialize the same resource type).

        :param endpoint_name: Inference endpoint name
        :type endpoint_name: str
        :param deployment_name: The identifier for the Batch inference
         deployment.
        :type deployment_name: str
        :param resource_group_name: The name of the resource group. The name
         is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param body: Batch inference deployment definition object.
        :type body:
         ~azure.mgmt.machinelearningservices.models.BatchDeploymentTrackedResource
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: BatchDeploymentTrackedResource or ClientRawResponse if
         raw=true
        :rtype:
         ~azure.mgmt.machinelearningservices.models.BatchDeploymentTrackedResource
         or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`ErrorResponseException<azure.mgmt.machinelearningservices.models.ErrorResponseException>`
        """
        # Construct URL
        url = self.create_or_update.metadata['url']
        path_format_arguments = {
            'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct body
        body_content = self._serialize.body(body, 'BatchDeploymentTrackedResource')

        # Construct and send request
        request = self._client.put(url, query_parameters, header_parameters, body_content)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200, 201]:
            raise models.ErrorResponseException(self._deserialize, response)

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('BatchDeploymentTrackedResource', response)
        if response.status_code == 201:
            deserialized = self._deserialize('BatchDeploymentTrackedResource', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}'}
+ + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2021-03-01-preview". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2021-03-01-preview" + + self.config = config + + def list( + self, resource_group_name, workspace_name, count=None, skip=None, custom_headers=None, raw=False, **operation_config): + """Lists Batch inference endpoint in the workspace. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param count: Number of endpoints to be retrieved in a page of + results. + :type count: int + :param skip: Continuation token for pagination. + :type skip: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of BatchEndpointTrackedResource + :rtype: + ~azure.mgmt.machinelearningservices.models.BatchEndpointTrackedResourcePaged[~azure.mgmt.machinelearningservices.models.BatchEndpointTrackedResource] + :raises: + :class:`ErrorResponseException` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + if count is not None: + query_parameters['count'] = self._serialize.query("count", count, 'int') + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: 
+ raise models.ErrorResponseException(self._deserialize, response) + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.BatchEndpointTrackedResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints'} + + def delete( + self, endpoint_name, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """Delete Batch Inference Endpoint. + + :param endpoint_name: Inference Endpoint name. + :type endpoint_name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 204]: + raise models.ErrorResponseException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}'} + + def get( + self, endpoint_name, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + 
"""Gets a batch inference endpoint by name. + + :param endpoint_name: Name for the Batch Endpoint. + :type endpoint_name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: BatchEndpointTrackedResource or ClientRawResponse if raw=true + :rtype: + ~azure.mgmt.machinelearningservices.models.BatchEndpointTrackedResource + or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # 
Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('BatchEndpointTrackedResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}'} + + def update( + self, endpoint_name, resource_group_name, workspace_name, body, custom_headers=None, raw=False, **operation_config): + """Update a batch inference endpoint. + + :param endpoint_name: Name for the Batch inference endpoint. + :type endpoint_name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param body: Mutable batch inference endpoint definition object. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialBatchEndpointPartialTrackedResource + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: BatchEndpointTrackedResource or ClientRawResponse if raw=true + :rtype: + ~azure.mgmt.machinelearningservices.models.BatchEndpointTrackedResource + or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.update.metadata['url'] + path_format_arguments = { + 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(body, 'PartialBatchEndpointPartialTrackedResource') + + # Construct and send request + request = self._client.patch(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if 
response.status_code == 200: + deserialized = self._deserialize('BatchEndpointTrackedResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}'} + + def create_or_update( + self, endpoint_name, resource_group_name, workspace_name, body, custom_headers=None, raw=False, **operation_config): + """Creates a batch inference endpoint. + + :param endpoint_name: Name for the Batch inference endpoint. + :type endpoint_name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param body: Batch inference endpoint definition object. + :type body: + ~azure.mgmt.machinelearningservices.models.BatchEndpointTrackedResource + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: BatchEndpointTrackedResource or ClientRawResponse if raw=true + :rtype: + ~azure.mgmt.machinelearningservices.models.BatchEndpointTrackedResource + or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(body, 'BatchEndpointTrackedResource') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 201]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if 
response.status_code == 200: + deserialized = self._deserialize('BatchEndpointTrackedResource', response) + if response.status_code == 201: + deserialized = self._deserialize('BatchEndpointTrackedResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}'} + + def list_keys( + self, endpoint_name, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """Lists batch Inference Endpoint keys. + + :param endpoint_name: Inference Endpoint name. + :type endpoint_name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: EndpointAuthKeys or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.list_keys.metadata['url'] + path_format_arguments = { + 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('EndpointAuthKeys', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + list_keys.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/listkeys'} diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_containers_operations.py new file mode 100644 index 000000000000..ab4df3cf38db --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_containers_operations.py @@ -0,0 +1,312 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse + +from .. import models + + +class CodeContainersOperations(object): + """CodeContainersOperations operations. + + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2021-03-01-preview". 
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2021-03-01-preview" + + self.config = config + + def list( + self, resource_group_name, workspace_name, skip=None, custom_headers=None, raw=False, **operation_config): + """List containers. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. + :type skip: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of CodeContainerResource + :rtype: + ~azure.mgmt.machinelearningservices.models.CodeContainerResourcePaged[~azure.mgmt.machinelearningservices.models.CodeContainerResource] + :raises: + :class:`ErrorResponseException` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'str') + + else: + url = next_link 
+ query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.CodeContainerResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes'} + + def delete( + self, name, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """Delete container. + + :param name: Container name. + :type name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 204]: + raise models.ErrorResponseException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}'} + + def get( + self, name, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """Get container. + + :param name: Container name. 
+ :type name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: CodeContainerResource or ClientRawResponse if raw=true + :rtype: + ~azure.mgmt.machinelearningservices.models.CodeContainerResource or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, 
stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('CodeContainerResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}'} + + def create_or_update( + self, name, resource_group_name, workspace_name, properties, custom_headers=None, raw=False, **operation_config): + """Create or update container. + + :param name: Container name. + :type name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param properties: Additional attributes of the entity. + :type properties: + ~azure.mgmt.machinelearningservices.models.CodeContainer + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: CodeContainerResource or ClientRawResponse if raw=true + :rtype: + ~azure.mgmt.machinelearningservices.models.CodeContainerResource or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + body = models.CodeContainerResource(properties=properties) + + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(body, 'CodeContainerResource') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 201]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if 
response.status_code == 200: + deserialized = self._deserialize('CodeContainerResource', response) + if response.status_code == 201: + deserialized = self._deserialize('CodeContainerResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}'} diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_versions_operations.py new file mode 100644 index 000000000000..1452f6cf8ea6 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_code_versions_operations.py @@ -0,0 +1,330 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse + +from .. import models + + +class CodeVersionsOperations(object): + """CodeVersionsOperations operations. + + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. 
+ :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2021-03-01-preview". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2021-03-01-preview" + + self.config = config + + def list( + self, name, resource_group_name, workspace_name, order_by=None, top=None, skip=None, custom_headers=None, raw=False, **operation_config): + """List versions. + + :param name: Container name. + :type name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param order_by: Ordering of list. + :type order_by: str + :param top: Maximum number of records to return. + :type top: int + :param skip: Continuation token for pagination. + :type skip: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of CodeVersionResource + :rtype: + ~azure.mgmt.machinelearningservices.models.CodeVersionResourcePaged[~azure.mgmt.machinelearningservices.models.CodeVersionResource] + :raises: + :class:`ErrorResponseException` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + if order_by is not None: + query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int') + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = 
prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.CodeVersionResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions'} + + def delete( + self, name, version, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """Delete version. + + :param name: Container name. + :type name: str + :param version: Version identifier. + :type version: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str'), + 'version': self._serialize.url("version", version, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 204]: + raise models.ErrorResponseException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}'} + + def get( + self, name, version, resource_group_name, workspace_name, custom_headers=None, 
raw=False, **operation_config): + """Get version. + + :param name: Container name. + :type name: str + :param version: Version identifier. + :type version: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: CodeVersionResource or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.machinelearningservices.models.CodeVersionResource + or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str'), + 'version': self._serialize.url("version", version, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = 
self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('CodeVersionResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}'} + + def create_or_update( + self, name, version, resource_group_name, workspace_name, properties, custom_headers=None, raw=False, **operation_config): + """Create or update version. + + :param name: Container name. + :type name: str + :param version: Version identifier. + :type version: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param properties: Additional attributes of the entity. + :type properties: + ~azure.mgmt.machinelearningservices.models.CodeVersion + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: CodeVersionResource or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.machinelearningservices.models.CodeVersionResource + or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + body = models.CodeVersionResource(properties=properties) + + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + 'version': self._serialize.url("version", version, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(body, 'CodeVersionResource') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 201]: + raise 
models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('CodeVersionResource', response) + if response.status_code == 201: + deserialized = self._deserialize('CodeVersionResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}'} diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_compute_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_compute_operations.py new file mode 100644 index 000000000000..3816b4f5e04a --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_compute_operations.py @@ -0,0 +1,946 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrest.polling import LROPoller, NoPolling +from msrestazure.polling.arm_polling import ARMPolling + +from .. import models + + +class ComputeOperations(object): + """ComputeOperations operations. + + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. 
+ + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2021-03-01-preview". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2021-03-01-preview" + + self.config = config + + def list( + self, resource_group_name, workspace_name, skip=None, custom_headers=None, raw=False, **operation_config): + """Gets computes in specified workspace. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. + :type skip: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of ComputeResource + :rtype: + ~azure.mgmt.machinelearningservices.models.ComputeResourcePaged[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises: + :class:`ErrorResponseException` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + return response + + # Deserialize response + header_dict = None + 
if raw: + header_dict = {} + deserialized = models.ComputeResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes'} + + def get( + self, resource_group_name, workspace_name, compute_name, custom_headers=None, raw=False, **operation_config): + """Gets compute definition by its name. Any secrets (storage keys, service + credentials, etc) are not returned - use 'keys' nested resource to get + them. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: ComputeResource or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.machinelearningservices.models.ComputeResource or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'computeName': self._serialize.url("compute_name", compute_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('ComputeResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} + + + def _create_or_update_initial( + self, resource_group_name, workspace_name, compute_name, parameters, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'computeName': self._serialize.url("compute_name", compute_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(parameters, 'ComputeResource') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 201]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + 
header_dict = {} + + if response.status_code == 200: + deserialized = self._deserialize('ComputeResource', response) + header_dict = { + 'Azure-AsyncOperation': 'str', + } + if response.status_code == 201: + deserialized = self._deserialize('ComputeResource', response) + header_dict = { + 'Azure-AsyncOperation': 'str', + } + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response.add_headers(header_dict) + return client_raw_response + + return deserialized + + def create_or_update( + self, resource_group_name, workspace_name, compute_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config): + """Creates or updates compute. This call will overwrite a compute if it + exists. This is a nonrecoverable operation. If your intent is to create + a new compute, do a GET first to verify that it does not exist yet. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :param parameters: Payload with Machine Learning compute definition. 
+ :type parameters: + ~azure.mgmt.machinelearningservices.models.ComputeResource + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns ComputeResource or + ClientRawResponse if raw==True + :rtype: + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.machinelearningservices.models.ComputeResource]] + :raises: + :class:`ErrorResponseException` + """ + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + parameters=parameters, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + header_dict = { + 'Azure-AsyncOperation': 'str', + } + deserialized = self._deserialize('ComputeResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response.add_headers(header_dict) + return client_raw_response + + return deserialized + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} + + + def 
_update_initial( + self, resource_group_name, workspace_name, compute_name, scale_settings=None, custom_headers=None, raw=False, **operation_config): + parameters = models.ClusterUpdateParameters(scale_settings=scale_settings) + + # Construct URL + url = self.update.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'computeName': self._serialize.url("compute_name", compute_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(parameters, 'ClusterUpdateParameters') + + # Construct and send request + request = self._client.patch(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('ComputeResource', response) + + 
if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def update( + self, resource_group_name, workspace_name, compute_name, scale_settings=None, custom_headers=None, raw=False, polling=True, **operation_config): + """Updates properties of a compute. This call will overwrite a compute if + it exists. This is a nonrecoverable operation. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :param scale_settings: Scale settings. Desired scale settings for the + amlCompute. + :type scale_settings: + ~azure.mgmt.machinelearningservices.models.ScaleSettings + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns ComputeResource or + ClientRawResponse if raw==True + :rtype: + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.machinelearningservices.models.ComputeResource]] + :raises: + :class:`ErrorResponseException` + """ + raw_result = self._update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + scale_settings=scale_settings, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + deserialized = self._deserialize('ComputeResource', response) + + if raw: 
+ client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} + + + def _delete_initial( + self, resource_group_name, workspace_name, compute_name, underlying_resource_action, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'computeName': self._serialize.url("compute_name", compute_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + query_parameters['underlyingResourceAction'] = self._serialize.query("underlying_resource_action", underlying_resource_action, 'str') + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + 
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202]: + raise models.ErrorResponseException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + header_dict = { + 'Azure-AsyncOperation': 'str', + 'Location': 'str', + } + client_raw_response.add_headers(header_dict) + return client_raw_response + + def delete( + self, resource_group_name, workspace_name, compute_name, underlying_resource_action, custom_headers=None, raw=False, polling=True, **operation_config): + """Deletes specified Machine Learning compute. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :param underlying_resource_action: Delete the underlying compute if + 'Delete', or detach the underlying compute from workspace if 'Detach'. 
+ Possible values include: 'Delete', 'Detach' + :type underlying_resource_action: str or + ~azure.mgmt.machinelearningservices.models.UnderlyingResourceAction + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns None or + ClientRawResponse if raw==True + :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] + :raises: + :class:`ErrorResponseException` + """ + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + underlying_resource_action=underlying_resource_action, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + if raw: + client_raw_response = ClientRawResponse(None, response) + client_raw_response.add_headers({ + 'Azure-AsyncOperation': 'str', + 'Location': 'str', + }) + return client_raw_response + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} + + def list_nodes( + self, resource_group_name, workspace_name, compute_name, custom_headers=None, raw=False, **operation_config): + """Get the details (e.g 
IP address, port etc) of all the compute nodes in + the compute. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of AmlComputeNodeInformation + :rtype: + ~azure.mgmt.machinelearningservices.models.AmlComputeNodeInformationPaged[~azure.mgmt.machinelearningservices.models.AmlComputeNodeInformation] + :raises: + :class:`ErrorResponseException` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list_nodes.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'computeName': self._serialize.url("compute_name", compute_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if 
    def list_keys(
            self, resource_group_name, workspace_name, compute_name, custom_headers=None, raw=False, **operation_config):
        """Gets secrets related to Machine Learning compute (storage keys, service
        credentials, etc).

        :param resource_group_name: The name of the resource group. The name
         is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param compute_name: Name of the Azure Machine Learning compute.
        :type compute_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides`.
        :return: ComputeSecrets or ClientRawResponse if raw=true
        :rtype: ~azure.mgmt.machinelearningservices.models.ComputeSecrets or
         ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`ErrorResponseException`
        """
        # Construct URL from the metadata template attached below.
        url = self.list_keys.metadata['url']
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'computeName': self._serialize.url("compute_name", compute_name, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        if self.config.generate_client_request_id:
            # uuid1 gives each request a unique x-ms-client-request-id for tracing.
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request.
        # listKeys is exposed as a POST action even though it only reads secrets.
        request = self._client.post(url, query_parameters, header_parameters)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200]:
            raise models.ErrorResponseException(self._deserialize, response)

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('ComputeSecrets', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
    list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys'}
    def _start_initial(
            self, resource_group_name, workspace_name, compute_name, custom_headers=None, raw=False, **operation_config):
        # Internal helper: issues the raw POST for the long-running start
        # action; the public `start` below wraps it in an LROPoller.
        # Construct URL (shares the metadata template attached to `start`).
        url = self.start.metadata['url']
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'computeName': self._serialize.url("compute_name", compute_name, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)

        # Construct headers
        header_parameters = {}
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.post(url, query_parameters, header_parameters)
        response = self._client.send(request, stream=False, **operation_config)

        # 202 Accepted is the only success status: the service performs the
        # start action asynchronously.
        if response.status_code not in [202]:
            raise models.ErrorResponseException(self._deserialize, response)

        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response

    def start(
            self, resource_group_name, workspace_name, compute_name, custom_headers=None, raw=False, polling=True, **operation_config):
        """Posts a start action to a compute instance.

        :param resource_group_name: The name of the resource group. The name
         is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param compute_name: Name of the Azure Machine Learning compute.
        :type compute_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: The poller return type is ClientRawResponse, the
         direct response alongside the deserialized response
        :param polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :return: An instance of LROPoller that returns None or
         ClientRawResponse if raw==True
        :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
        :raises:
         :class:`ErrorResponseException`
        """
        # Issue the initial request with raw=True so the poller can read the
        # raw response (headers carry the async-operation status URL).
        raw_result = self._start_initial(
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            compute_name=compute_name,
            custom_headers=custom_headers,
            raw=True,
            **operation_config
        )

        def get_long_running_output(response):
            # The operation returns no body; only surface the raw response
            # when the caller asked for it (otherwise implicitly None).
            if raw:
                client_raw_response = ClientRawResponse(None, response)
                return client_raw_response

        lro_delay = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        # Select the polling strategy: default ARM polling, no-op polling,
        # or a caller-supplied polling object.
        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start'}
    def _stop_initial(
            self, resource_group_name, workspace_name, compute_name, custom_headers=None, raw=False, **operation_config):
        # Internal helper: issues the raw POST for the long-running stop
        # action; the public `stop` below wraps it in an LROPoller.
        # Construct URL (shares the metadata template attached to `stop`).
        url = self.stop.metadata['url']
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'computeName': self._serialize.url("compute_name", compute_name, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)

        # Construct headers
        header_parameters = {}
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.post(url, query_parameters, header_parameters)
        response = self._client.send(request, stream=False, **operation_config)

        # 202 Accepted is the only success status: the service performs the
        # stop action asynchronously.
        if response.status_code not in [202]:
            raise models.ErrorResponseException(self._deserialize, response)

        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response

    def stop(
            self, resource_group_name, workspace_name, compute_name, custom_headers=None, raw=False, polling=True, **operation_config):
        """Posts a stop action to a compute instance.

        :param resource_group_name: The name of the resource group. The name
         is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param compute_name: Name of the Azure Machine Learning compute.
        :type compute_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: The poller return type is ClientRawResponse, the
         direct response alongside the deserialized response
        :param polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :return: An instance of LROPoller that returns None or
         ClientRawResponse if raw==True
        :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
        :raises:
         :class:`ErrorResponseException`
        """
        # Issue the initial request with raw=True so the poller can read the
        # raw response (headers carry the async-operation status URL).
        raw_result = self._stop_initial(
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            compute_name=compute_name,
            custom_headers=custom_headers,
            raw=True,
            **operation_config
        )

        def get_long_running_output(response):
            # The operation returns no body; only surface the raw response
            # when the caller asked for it (otherwise implicitly None).
            if raw:
                client_raw_response = ClientRawResponse(None, response)
                return client_raw_response

        lro_delay = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        # Select the polling strategy: default ARM polling, no-op polling,
        # or a caller-supplied polling object.
        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop'}
+ :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.restart.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'computeName': self._serialize.url("compute_name", compute_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + return 
    def update_schedules(
            self, resource_group_name, workspace_name, compute_name, compute_start_stop=None, custom_headers=None, raw=False, **operation_config):
        """Updates schedules of a compute instance.

        :param resource_group_name: The name of the resource group. The name
         is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param compute_name: Name of the Azure Machine Learning compute.
        :type compute_name: str
        :param compute_start_stop: The list of compute start stop schedules to
         be applied.
        :type compute_start_stop:
         list[~azure.mgmt.machinelearningservices.models.ComputeStartStopSchedule]
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides`.
        :return: None or ClientRawResponse if raw=true
        :rtype: None or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`ErrorResponseException`
        """
        # Wrap the flat schedule list in the ComputeSchedules envelope only
        # when the caller supplied one; otherwise the request has no body.
        parameters = None
        if compute_start_stop is not None:
            parameters = models.ComputeSchedules(compute_start_stop=compute_start_stop)

        # Construct URL
        url = self.update_schedules.metadata['url']
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'computeName': self._serialize.url("compute_name", compute_name, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct body (may legitimately be None, see above).
        if parameters is not None:
            body_content = self._serialize.body(parameters, 'ComputeSchedules')
        else:
            body_content = None

        # Construct and send request
        request = self._client.post(url, query_parameters, header_parameters, body_content)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200]:
            raise models.ErrorResponseException(self._deserialize, response)

        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response
    update_schedules.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/updateSchedules'}
ClientRawResponse(None, response) + return client_raw_response + update_schedules.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/updateSchedules'} diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_containers_operations.py new file mode 100644 index 000000000000..6086d77be9c2 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_containers_operations.py @@ -0,0 +1,312 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse + +from .. import models + + +class DataContainersOperations(object): + """DataContainersOperations operations. + + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2021-03-01-preview". 
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2021-03-01-preview" + + self.config = config + + def list( + self, resource_group_name, workspace_name, skip=None, custom_headers=None, raw=False, **operation_config): + """List containers. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. + :type skip: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of DataContainerResource + :rtype: + ~azure.mgmt.machinelearningservices.models.DataContainerResourcePaged[~azure.mgmt.machinelearningservices.models.DataContainerResource] + :raises: + :class:`ErrorResponseException` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'str') + + else: + url = next_link 
+ query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.DataContainerResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data'} + + def delete( + self, name, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """Delete container. + + :param name: Container name. + :type name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 204]: + raise models.ErrorResponseException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}'} + + def get( + self, name, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """Get container. + + :param name: Container name. 
+ :type name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: DataContainerResource or ClientRawResponse if raw=true + :rtype: + ~azure.mgmt.machinelearningservices.models.DataContainerResource or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, 
    def create_or_update(
            self, name, resource_group_name, workspace_name, properties, custom_headers=None, raw=False, **operation_config):
        """Create or update container.

        :param name: Container name.
        :type name: str
        :param resource_group_name: The name of the resource group. The name
         is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param properties: Additional attributes of the entity.
        :type properties:
         ~azure.mgmt.machinelearningservices.models.DataContainer
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides`.
        :return: DataContainerResource or ClientRawResponse if raw=true
        :rtype:
         ~azure.mgmt.machinelearningservices.models.DataContainerResource or
         ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`ErrorResponseException`
        """
        # Wrap the caller-supplied properties in the resource envelope
        # expected by the PUT body.
        body = models.DataContainerResource(properties=properties)

        # Construct URL.
        # NOTE(review): only create_or_update validates `name` against a
        # regex pattern; get/delete serialize it unconstrained — generated
        # inconsistency, kept as-is.
        url = self.create_or_update.metadata['url']
        path_format_arguments = {
            'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct body
        body_content = self._serialize.body(body, 'DataContainerResource')

        # Construct and send request
        request = self._client.put(url, query_parameters, header_parameters, body_content)
        response = self._client.send(request, stream=False, **operation_config)

        # 200 = updated existing resource, 201 = created new resource;
        # both carry the resulting resource in the body.
        if response.status_code not in [200, 201]:
            raise models.ErrorResponseException(self._deserialize, response)

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('DataContainerResource', response)
        if response.status_code == 201:
            deserialized = self._deserialize('DataContainerResource', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}'}
response.status_code == 200: + deserialized = self._deserialize('DataContainerResource', response) + if response.status_code == 201: + deserialized = self._deserialize('DataContainerResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}'} diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_versions_operations.py new file mode 100644 index 000000000000..901e2c9cd618 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_data_versions_operations.py @@ -0,0 +1,335 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse + +from .. import models + + +class DataVersionsOperations(object): + """DataVersionsOperations operations. + + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. 
    # Expose the models module on the class so callers can reach the
    # operation-group's model types via the instance.
    models = models

    def __init__(self, client, config, serializer, deserializer):
        # client: service request pipeline; serializer/deserializer:
        # msrest object-model (de)serialization helpers.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        # Constant API version for every operation in this group.
        self.api_version = "2021-03-01-preview"

        self.config = config

    def list(
            self, name, resource_group_name, workspace_name, order_by=None, top=None, skip=None, tags=None, custom_headers=None, raw=False, **operation_config):
        """List data versions.

        :param name: Data name.
        :type name: str
        :param resource_group_name: The name of the resource group. The name
         is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param order_by: Ordering of list.
        :type order_by: str
        :param top: Maximum number of records to return.
        :type top: int
        :param skip: Continuation token for pagination.
        :type skip: str
        :param tags: Comma-separated list of tag names (and optionally
         values). Example: tag1,tag2=value2
        :type tags: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides`.
        :return: An iterator like instance of DataVersionResource
        :rtype:
         ~azure.mgmt.machinelearningservices.models.DataVersionResourcePaged[~azure.mgmt.machinelearningservices.models.DataVersionResource]
        :raises:
         :class:`ErrorResponseException`
        """
        def prepare_request(next_link=None):
            # First page: build the URL and all optional OData-style query
            # params. Subsequent pages: next_link already embeds them.
            if not next_link:
                # Construct URL
                url = self.list.metadata['url']
                path_format_arguments = {
                    'name': self._serialize.url("name", name, 'str'),
                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)

                # Construct parameters (optional filters only when supplied)
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)
                if order_by is not None:
                    query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str')
                if top is not None:
                    query_parameters['$top'] = self._serialize.query("top", top, 'int')
                if skip is not None:
                    query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
                if tags is not None:
                    query_parameters['$tags'] = self._serialize.query("tags", tags, 'str')

            else:
                url = next_link
                query_parameters = {}

            # Construct headers
            header_parameters = {}
            header_parameters['Accept'] = 'application/json'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

            # Construct and send request
            request = self._client.get(url, query_parameters, header_parameters)
            return request

        def internal_paging(next_link=None):
            # Called by the paged collection for each page fetch.
            request = prepare_request(next_link)

            response = self._client.send(request, stream=False, **operation_config)

            if response.status_code not in [200]:
                raise models.ErrorResponseException(self._deserialize, response)

            return response

        # Deserialize response: the Paged class drives internal_paging lazily.
        header_dict = None
        if raw:
            header_dict = {}
        deserialized = models.DataVersionResourcePaged(internal_paging, self._deserialize.dependencies, header_dict)

        return deserialized
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions'}
header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.DataVersionResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions'} + + def delete( + self, name, version, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """Delete version. + + :param name: Container name. + :type name: str + :param version: Version identifier. + :type version: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str'), + 'version': self._serialize.url("version", version, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 204]: + raise models.ErrorResponseException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}'} + + def get( + self, name, version, resource_group_name, workspace_name, custom_headers=None, 
raw=False, **operation_config): + """Get version. + + :param name: Container name. + :type name: str + :param version: Version identifier. + :type version: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: DataVersionResource or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionResource + or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str'), + 'version': self._serialize.url("version", version, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = 
self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('DataVersionResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}'} + + def create_or_update( + self, name, version, resource_group_name, workspace_name, properties, custom_headers=None, raw=False, **operation_config): + """Create or update version. + + :param name: Container name. + :type name: str + :param version: Version identifier. + :type version: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param properties: Additional attributes of the entity. + :type properties: + ~azure.mgmt.machinelearningservices.models.DataVersion + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: DataVersionResource or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionResource + or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + body = models.DataVersionResource(properties=properties) + + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + 'version': self._serialize.url("version", version, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(body, 'DataVersionResource') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 201]: + raise 
models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('DataVersionResource', response) + if response.status_code == 201: + deserialized = self._deserialize('DataVersionResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}'} diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_datastores_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_datastores_operations.py new file mode 100644 index 000000000000..fa123c996b4e --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_datastores_operations.py @@ -0,0 +1,405 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse + +from .. import models + + +class DatastoresOperations(object): + """DatastoresOperations operations. + + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. 
+ :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2021-03-01-preview". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2021-03-01-preview" + + self.config = config + + def list( + self, resource_group_name, workspace_name, skip=None, count=30, is_default=None, names=None, search_text=None, order_by=None, order_by_asc=False, custom_headers=None, raw=False, **operation_config): + """List datastores. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. + :type skip: str + :param count: Maximum number of results to return. + :type count: int + :param is_default: Filter down to the workspace default datastore. + :type is_default: bool + :param names: Names of datastores to return. + :type names: list[str] + :param search_text: Text to search for in the datastore names. + :type search_text: str + :param order_by: Order by property (createdtime | modifiedtime | + name). + :type order_by: str + :param order_by_asc: Order by property in ascending order. + :type order_by_asc: bool + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of DatastorePropertiesResource + :rtype: + ~azure.mgmt.machinelearningservices.models.DatastorePropertiesResourcePaged[~azure.mgmt.machinelearningservices.models.DatastorePropertiesResource] + :raises: + :class:`ErrorResponseException` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'str') + if count is not None: + query_parameters['count'] = self._serialize.query("count", count, 'int') + if is_default is not None: + query_parameters['isDefault'] = self._serialize.query("is_default", is_default, 'bool') + if names is not None: + query_parameters['names'] = self._serialize.query("names", names, '[str]', div=',') + if search_text is not None: + query_parameters['searchText'] = self._serialize.query("search_text", search_text, 'str') + if order_by is not None: + query_parameters['orderBy'] = self._serialize.query("order_by", order_by, 'str') + if order_by_asc is not None: + query_parameters['orderByAsc'] = self._serialize.query("order_by_asc", order_by_asc, 'bool') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + 
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.DatastorePropertiesResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores'} + + def delete( + self, name, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """Delete datastore. + + :param name: Datastore name. + :type name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 204]: + raise models.ErrorResponseException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}'} + + def get( + self, name, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """Get datastore. + + :param name: Datastore name. 
+ :type name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: DatastorePropertiesResource or ClientRawResponse if raw=true + :rtype: + ~azure.mgmt.machinelearningservices.models.DatastorePropertiesResource + or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = 
self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('DatastorePropertiesResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}'} + + def create_or_update( + self, name, resource_group_name, workspace_name, properties, skip_validation=False, custom_headers=None, raw=False, **operation_config): + """Create or update datastore. + + :param name: Datastore name. + :type name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param properties: Additional attributes of the entity. + :type properties: + ~azure.mgmt.machinelearningservices.models.DatastoreProperties + :param skip_validation: Flag to skip validation. + :type skip_validation: bool + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: DatastorePropertiesResource or ClientRawResponse if raw=true + :rtype: + ~azure.mgmt.machinelearningservices.models.DatastorePropertiesResource + or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + body = models.DatastorePropertiesResource(properties=properties) + + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + if skip_validation is not None: + query_parameters['skipValidation'] = self._serialize.query("skip_validation", skip_validation, 'bool') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(body, 'DatastorePropertiesResource') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, 
**operation_config) + + if response.status_code not in [200, 201]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('DatastorePropertiesResource', response) + if response.status_code == 201: + deserialized = self._deserialize('DatastorePropertiesResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}'} + + def list_secrets( + self, name, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """Get datastore secrets. + + :param name: Datastore name. + :type name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: DatastoreSecrets or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.machinelearningservices.models.DatastoreSecrets or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.list_secrets.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('DatastoreSecrets', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + list_secrets.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}/listSecrets'} diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_containers_operations.py new file mode 100644 index 000000000000..7dd75bfa9147 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_containers_operations.py @@ -0,0 +1,312 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse + +from .. import models + + +class EnvironmentContainersOperations(object): + """EnvironmentContainersOperations operations. + + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2021-03-01-preview". 
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2021-03-01-preview" + + self.config = config + + def list( + self, resource_group_name, workspace_name, skip=None, custom_headers=None, raw=False, **operation_config): + """List containers. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. + :type skip: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of EnvironmentContainerResource + :rtype: + ~azure.mgmt.machinelearningservices.models.EnvironmentContainerResourcePaged[~azure.mgmt.machinelearningservices.models.EnvironmentContainerResource] + :raises: + :class:`ErrorResponseException` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'str') + + 
else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.EnvironmentContainerResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments'} + + def delete( + self, name, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """Delete container. + + :param name: Container name. + :type name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 204]: + raise models.ErrorResponseException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}'} + + def get( + self, name, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """Get container. 
+ + :param name: Container name. + :type name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: EnvironmentContainerResource or ClientRawResponse if raw=true + :rtype: + ~azure.mgmt.machinelearningservices.models.EnvironmentContainerResource + or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + 
response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('EnvironmentContainerResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}'} + + def create_or_update( + self, name, resource_group_name, workspace_name, properties, custom_headers=None, raw=False, **operation_config): + """Create or update container. + + :param name: Container name. + :type name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param properties: Additional attributes of the entity. + :type properties: + ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: EnvironmentContainerResource or ClientRawResponse if raw=true + :rtype: + ~azure.mgmt.machinelearningservices.models.EnvironmentContainerResource + or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + body = models.EnvironmentContainerResource(properties=properties) + + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(body, 'EnvironmentContainerResource') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 201]: + raise models.ErrorResponseException(self._deserialize, response) + + 
deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('EnvironmentContainerResource', response) + if response.status_code == 201: + deserialized = self._deserialize('EnvironmentContainerResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}'} diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_specification_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_specification_versions_operations.py new file mode 100644 index 000000000000..0361511d7fa8 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_environment_specification_versions_operations.py @@ -0,0 +1,335 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse + +from .. import models + + +class EnvironmentSpecificationVersionsOperations(object): + """EnvironmentSpecificationVersionsOperations operations. + + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. 
+ + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2021-03-01-preview". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2021-03-01-preview" + + self.config = config + + def list( + self, name, resource_group_name, workspace_name, order_by=None, top=None, skip=None, custom_headers=None, raw=False, **operation_config): + """List versions. + + :param name: Container name. + :type name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param order_by: Ordering of list. + :type order_by: str + :param top: Maximum number of records to return. + :type top: int + :param skip: Continuation token for pagination. + :type skip: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of + EnvironmentSpecificationVersionResource + :rtype: + ~azure.mgmt.machinelearningservices.models.EnvironmentSpecificationVersionResourcePaged[~azure.mgmt.machinelearningservices.models.EnvironmentSpecificationVersionResource] + :raises: + :class:`ErrorResponseException` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + if order_by is not None: + query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int') + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def 
internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.EnvironmentSpecificationVersionResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions'} + + def delete( + self, name, version, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """Delete version. + + :param name: Container name. + :type name: str + :param version: Version identifier. + :type version: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str'), + 'version': self._serialize.url("version", version, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 204]: + raise models.ErrorResponseException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}'} + + def get( + self, name, version, resource_group_name, workspace_name, 
custom_headers=None, raw=False, **operation_config): + """Get version. + + :param name: Container name. + :type name: str + :param version: Version identifier. + :type version: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: EnvironmentSpecificationVersionResource or ClientRawResponse + if raw=true + :rtype: + ~azure.mgmt.machinelearningservices.models.EnvironmentSpecificationVersionResource + or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str'), + 'version': self._serialize.url("version", version, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + 
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('EnvironmentSpecificationVersionResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}'} + + def create_or_update( + self, name, version, resource_group_name, workspace_name, properties, custom_headers=None, raw=False, **operation_config): + """Creates or updates an EnvironmentSpecificationVersion. + + :param name: Name of EnvironmentSpecificationVersion. + :type name: str + :param version: Version of EnvironmentSpecificationVersion. + :type version: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param properties: Additional attributes of the entity. + :type properties: + ~azure.mgmt.machinelearningservices.models.EnvironmentSpecificationVersion + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: EnvironmentSpecificationVersionResource or ClientRawResponse + if raw=true + :rtype: + ~azure.mgmt.machinelearningservices.models.EnvironmentSpecificationVersionResource + or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + body = models.EnvironmentSpecificationVersionResource(properties=properties) + + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + 'version': self._serialize.url("version", version, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(body, 'EnvironmentSpecificationVersionResource') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if 
response.status_code not in [200, 201]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('EnvironmentSpecificationVersionResource', response) + if response.status_code == 201: + deserialized = self._deserialize('EnvironmentSpecificationVersionResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}'} diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_jobs_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_jobs_operations.py new file mode 100644 index 000000000000..afd6987ae922 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_jobs_operations.py @@ -0,0 +1,417 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrest.polling import LROPoller, NoPolling +from msrestazure.polling.arm_polling import ARMPolling + +from .. import models + + +class JobsOperations(object): + """JobsOperations operations. 
+ + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2021-03-01-preview". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2021-03-01-preview" + + self.config = config + + def list( + self, resource_group_name, workspace_name, skip=None, job_type=None, tags=None, tag=None, custom_headers=None, raw=False, **operation_config): + """Lists Jobs in the workspace. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. + :type skip: str + :param job_type: Type of job to be returned. + :type job_type: str + :param tags: Tags for job to be returned. + :type tags: str + :param tag: Jobs returned will have this tag key. + :type tag: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of JobBaseResource + :rtype: + ~azure.mgmt.machinelearningservices.models.JobBaseResourcePaged[~azure.mgmt.machinelearningservices.models.JobBaseResource] + :raises: + :class:`ErrorResponseException` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'str') + if job_type is not None: + query_parameters['jobType'] = self._serialize.query("job_type", job_type, 'str') + if tags is not None: + query_parameters['tags'] = self._serialize.query("tags", tags, 'str') + if tag is not None: + query_parameters['tag'] = self._serialize.query("tag", tag, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): 
+ request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.JobBaseResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs'} + + + def _delete_initial( + self, id, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'id': self._serialize.url("id", id, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, 
stream=False, **operation_config) + + if response.status_code not in [200, 202, 204]: + raise models.ErrorResponseException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + header_dict = { + 'x-ms-async-operation-timeout': 'duration', + 'Location': 'str', + 'Retry-After': 'int', + } + client_raw_response.add_headers(header_dict) + return client_raw_response + + def delete( + self, id, resource_group_name, workspace_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Deletes a Job (asynchronous). + + :param id: The name and identifier for the Job. + :type id: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns None or + ClientRawResponse if raw==True + :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] + :raises: + :class:`ErrorResponseException` + """ + raw_result = self._delete_initial( + id=id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + if raw: + client_raw_response = ClientRawResponse(None, response) + client_raw_response.add_headers({ + 'x-ms-async-operation-timeout': 'duration', + 'Location': 'str', + 'Retry-After': 'int', + }) + return client_raw_response + + lro_delay = operation_config.get( + 
'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}'} + + def get( + self, id, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """Gets a Job by name/id. + + :param id: The name and identifier for the Job. + :type id: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: JobBaseResource or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.machinelearningservices.models.JobBaseResource or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'id': self._serialize.url("id", id, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('JobBaseResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}'} + + def create_or_update( + self, id, resource_group_name, workspace_name, properties, custom_headers=None, raw=False, **operation_config): + """Creates and executes a Job. + + :param id: The name and identifier for the Job. + :type id: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param properties: Additional attributes of the entity. + :type properties: ~azure.mgmt.machinelearningservices.models.JobBase + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: JobBaseResource or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.machinelearningservices.models.JobBaseResource or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + body = models.JobBaseResource(properties=properties) + + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'id': self._serialize.url("id", id, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # 
Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(body, 'JobBaseResource') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 201]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('JobBaseResource', response) + if response.status_code == 201: + deserialized = self._deserialize('JobBaseResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}'} + + def cancel( + self, id, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """Cancels a Job. + + :param id: The name and identifier for the Job. + :type id: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
+ :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.cancel.metadata['url'] + path_format_arguments = { + 'id': self._serialize.url("id", id, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + cancel.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}/cancel'} diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_machine_learning_compute_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_labeling_jobs_operations.py similarity index 60% rename from sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_machine_learning_compute_operations.py rename to sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_labeling_jobs_operations.py index 203b86e19b28..b567f9e34d75 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_machine_learning_compute_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_labeling_jobs_operations.py @@ -17,8 +17,8 @@ from .. import models -class MachineLearningComputeOperations(object): - """MachineLearningComputeOperations operations. +class LabelingJobsOperations(object): + """LabelingJobsOperations operations. You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. @@ -26,7 +26,7 @@ class MachineLearningComputeOperations(object): :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. - :ivar api_version: Version of Azure Machine Learning resource provider API. Constant value: "2019-05-01". + :ivar api_version: The API version to use for this operation. Constant value: "2021-03-01-preview". 
""" models = models @@ -36,48 +36,52 @@ def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer - self.api_version = "2019-05-01" + self.api_version = "2021-03-01-preview" self.config = config - def list_by_workspace( - self, resource_group_name, workspace_name, skiptoken=None, custom_headers=None, raw=False, **operation_config): - """Gets computes in specified workspace. + def list( + self, resource_group_name, workspace_name, skip=None, count=None, custom_headers=None, raw=False, **operation_config): + """Lists labeling jobs in the workspace. - :param resource_group_name: Name of the resource group in which - workspace is located. + :param resource_group_name: The name of the resource group. The name + is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str - :param skiptoken: Continuation token for pagination. - :type skiptoken: str + :param skip: Continuation token for pagination. + :type skip: str + :param count: Number of labeling jobs to return. + :type count: int :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
- :return: An iterator like instance of ComputeResource + :return: An iterator like instance of LabelingJobResource :rtype: - ~azure.mgmt.machinelearningservices.models.ComputeResourcePaged[~azure.mgmt.machinelearningservices.models.ComputeResource] + ~azure.mgmt.machinelearningservices.models.LabelingJobResourcePaged[~azure.mgmt.machinelearningservices.models.LabelingJobResource] :raises: - :class:`MachineLearningServiceErrorException` + :class:`ErrorResponseException` """ def prepare_request(next_link=None): if not next_link: # Construct URL - url = self.list_by_workspace.metadata['url'] + url = self.list.metadata['url'] path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - if skiptoken is not None: - query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'str') + if count is not None: + query_parameters['count'] = self._serialize.query("count", count, 'int') else: url = next_link @@ -103,7 +107,7 @@ def internal_paging(next_link=None): response = self._client.send(request, stream=False, **operation_config) if response.status_code 
not in [200]: - raise models.MachineLearningServiceErrorException(self._deserialize, response) + raise models.ErrorResponseException(self._deserialize, response) return response @@ -111,48 +115,112 @@ def internal_paging(next_link=None): header_dict = None if raw: header_dict = {} - deserialized = models.ComputeResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + deserialized = models.LabelingJobResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) return deserialized - list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes'} + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs'} + + def delete( + self, id, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """Delete a labeling job. + + :param id: The name and identifier for the LabelingJob. + :type id: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'id': self._serialize.url("id", id, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 204]: + raise models.ErrorResponseException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}'} def get( - self, resource_group_name, workspace_name, compute_name, custom_headers=None, raw=False, **operation_config): - """Gets compute definition by its name. 
Any secrets (storage keys, service - credentials, etc) are not returned - use 'keys' nested resource to get - them. + self, id, resource_group_name, workspace_name, include_job_instructions=None, include_label_categories=None, custom_headers=None, raw=False, **operation_config): + """Gets a labeling job by name/id. - :param resource_group_name: Name of the resource group in which - workspace is located. + :param id: The name and identifier for the LabelingJob. + :type id: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. - :type compute_name: str + :param include_job_instructions: Boolean value to indicate whether to + include JobInstructions in response. + :type include_job_instructions: bool + :param include_label_categories: Boolean value to indicate Whether to + include LabelCategories in response. + :type include_label_categories: bool :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides`. 
- :return: ComputeResource or ClientRawResponse if raw=true - :rtype: ~azure.mgmt.machinelearningservices.models.ComputeResource or - ~msrest.pipeline.ClientRawResponse + :return: LabelingJobResource or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.machinelearningservices.models.LabelingJobResource + or ~msrest.pipeline.ClientRawResponse :raises: - :class:`MachineLearningServiceErrorException` + :class:`ErrorResponseException` """ # Construct URL url = self.get.metadata['url'] path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str') + 'id': self._serialize.url("id", id, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + if include_job_instructions is not None: + query_parameters['includeJobInstructions'] = self._serialize.query("include_job_instructions", include_job_instructions, 'bool') + if include_label_categories is not None: + query_parameters['includeLabelCategories'] = self._serialize.query("include_label_categories", include_label_categories, 'bool') # Construct headers header_parameters = {} @@ -169,35 +237,37 @@ def get( 
response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: - raise models.MachineLearningServiceErrorException(self._deserialize, response) + raise models.ErrorResponseException(self._deserialize, response) deserialized = None if response.status_code == 200: - deserialized = self._deserialize('ComputeResource', response) + deserialized = self._deserialize('LabelingJobResource', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}'} def _create_or_update_initial( - self, resource_group_name, workspace_name, compute_name, parameters, custom_headers=None, raw=False, **operation_config): + self, id, resource_group_name, workspace_name, properties, custom_headers=None, raw=False, **operation_config): + body = models.LabelingJobResource(properties=properties) + # Construct URL url = self.create_or_update.metadata['url'] path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str') + 'id': self._serialize.url("id", id, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", 
resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) # Construct headers header_parameters = {} @@ -211,26 +281,28 @@ def _create_or_update_initial( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct body - body_content = self._serialize.body(parameters, 'ComputeResource') + body_content = self._serialize.body(body, 'LabelingJobResource') # Construct and send request request = self._client.put(url, query_parameters, header_parameters, body_content) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200, 201]: - raise models.MachineLearningServiceErrorException(self._deserialize, response) + raise models.ErrorResponseException(self._deserialize, response) deserialized = None header_dict = {} if response.status_code == 200: - deserialized = self._deserialize('ComputeResource', response) + deserialized = self._deserialize('LabelingJobResource', response) header_dict = { + 'x-ms-async-operation-timeout': 'duration', 'Azure-AsyncOperation': 'str', } if response.status_code == 201: - deserialized = self._deserialize('ComputeResource', response) + deserialized = self._deserialize('LabelingJobResource', response) header_dict = { + 'x-ms-async-operation-timeout': 'duration', 'Azure-AsyncOperation': 'str', } @@ -242,40 +314,38 @@ def _create_or_update_initial( return deserialized def create_or_update( - self, resource_group_name, workspace_name, compute_name, parameters, custom_headers=None, raw=False, polling=True, 
**operation_config): - """Creates or updates compute. This call will overwrite a compute if it - exists. This is a nonrecoverable operation. If your intent is to create - a new compute, do a GET first to verify that it does not exist yet. + self, id, resource_group_name, workspace_name, properties, custom_headers=None, raw=False, polling=True, **operation_config): + """Creates or updates a labeling job (asynchronous). - :param resource_group_name: Name of the resource group in which - workspace is located. + :param id: The name and identifier for the LabelingJob. + :type id: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. - :type compute_name: str - :param parameters: Payload with Machine Learning compute definition. - :type parameters: - ~azure.mgmt.machinelearningservices.models.ComputeResource + :param properties: Additional attributes of the entity. 
+ :type properties: + ~azure.mgmt.machinelearningservices.models.LabelingJob :param dict custom_headers: headers that will be added to the request :param bool raw: The poller return type is ClientRawResponse, the direct response alongside the deserialized response :param polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy - :return: An instance of LROPoller that returns ComputeResource or - ClientRawResponse if raw==True + :return: An instance of LROPoller that returns LabelingJobResource or + ClientRawResponse if raw==True :rtype: - ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.machinelearningservices.models.LabelingJobResource] or - ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.machinelearningservices.models.ComputeResource]] + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.machinelearningservices.models.LabelingJobResource]] :raises: - :class:`MachineLearningServiceErrorException` + :class:`ErrorResponseException` """ raw_result = self._create_or_update_initial( + id=id, resource_group_name=resource_group_name, workspace_name=workspace_name, - compute_name=compute_name, - parameters=parameters, + properties=properties, custom_headers=custom_headers, raw=True, **operation_config @@ -283,9 +353,10 @@ def create_or_update( def get_long_running_output(response): header_dict = { + 'x-ms-async-operation-timeout': 'duration', 'Azure-AsyncOperation': 'str', } - deserialized = self._deserialize('ComputeResource', response) + deserialized = self._deserialize('LabelingJobResource', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) @@ -301,26 +372,24 @@ def get_long_running_output(response): elif polling is False: polling_method = NoPolling() else: polling_method = 
polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} - + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}'} - def _update_initial( - self, resource_group_name, workspace_name, compute_name, scale_settings=None, custom_headers=None, raw=False, **operation_config): - parameters = models.ClusterUpdateParameters(scale_settings=scale_settings) + def _export_labels_initial( + self, id, resource_group_name, workspace_name, body, custom_headers=None, raw=False, **operation_config): # Construct URL - url = self.update.metadata['url'] + url = self.export_labels.metadata['url'] path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str') + 'id': self._serialize.url("id", id, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + query_parameters['api-version'] = 
self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) # Construct headers header_parameters = {} @@ -334,71 +403,79 @@ def _update_initial( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') # Construct body - body_content = self._serialize.body(parameters, 'ClusterUpdateParameters') + body_content = self._serialize.body(body, 'ExportSummary') # Construct and send request - request = self._client.patch(url, query_parameters, header_parameters, body_content) + request = self._client.post(url, query_parameters, header_parameters, body_content) response = self._client.send(request, stream=False, **operation_config) - if response.status_code not in [200]: - raise models.MachineLearningServiceErrorException(self._deserialize, response) + if response.status_code not in [200, 202]: + raise models.ErrorResponseException(self._deserialize, response) deserialized = None + header_dict = {} if response.status_code == 200: - deserialized = self._deserialize('ComputeResource', response) + deserialized = self._deserialize('ExportSummary', response) + header_dict = { + 'Location': 'str', + 'Retry-After': 'int', + } if raw: client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response.add_headers(header_dict) return client_raw_response return deserialized - def update( - self, resource_group_name, workspace_name, compute_name, scale_settings=None, custom_headers=None, raw=False, polling=True, **operation_config): - """Updates properties of a compute. This call will overwrite a compute if - it exists. This is a nonrecoverable operation. + def export_labels( + self, id, resource_group_name, workspace_name, body, custom_headers=None, raw=False, polling=True, **operation_config): + """Export labels from a labeling job (asynchronous). - :param resource_group_name: Name of the resource group in which - workspace is located. 
+ :param id: The name and identifier for the LabelingJob. + :type id: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. - :type compute_name: str - :param scale_settings: Scale settings. Desired scale settings for the - amlCompute. - :type scale_settings: - ~azure.mgmt.machinelearningservices.models.ScaleSettings + :param body: The export summary. + :type body: ~azure.mgmt.machinelearningservices.models.ExportSummary :param dict custom_headers: headers that will be added to the request :param bool raw: The poller return type is ClientRawResponse, the direct response alongside the deserialized response :param polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy - :return: An instance of LROPoller that returns ComputeResource or - ClientRawResponse if raw==True + :return: An instance of LROPoller that returns ExportSummary or + ClientRawResponse if raw==True :rtype: - ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.machinelearningservices.models.ExportSummary] or - ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.machinelearningservices.models.ComputeResource]] + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.machinelearningservices.models.ExportSummary]] :raises: - :class:`MachineLearningServiceErrorException` + :class:`ErrorResponseException` """ - raw_result = self._update_initial( + raw_result = self._export_labels_initial( + id=id, resource_group_name=resource_group_name, workspace_name=workspace_name, - compute_name=compute_name, - 
scale_settings=scale_settings, + body=body, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): - deserialized = self._deserialize('ComputeResource', response) + header_dict = { + 'Location': 'str', + 'Retry-After': 'int', + } + deserialized = self._deserialize('ExportSummary', response) if raw: client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response.add_headers(header_dict) return client_raw_response return deserialized @@ -406,29 +483,46 @@ def get_long_running_output(response): lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) - if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} + export_labels.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels'} + def pause( + self, id, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """Pause a labeling job. - def _delete_initial( - self, resource_group_name, workspace_name, compute_name, underlying_resource_action, custom_headers=None, raw=False, **operation_config): + :param id: The name and identifier for the LabelingJob. + :type id: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ # Construct URL - url = self.delete.metadata['url'] + url = self.pause.metadata['url'] path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str') + 'id': self._serialize.url("id", id, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - query_parameters['underlyingResourceAction'] = self._serialize.query("underlying_resource_action", underlying_resource_action, 'str') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) # Construct headers header_parameters = {} @@ -440,37 +534,70 @@ def _delete_initial( header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", 
self.config.accept_language, 'str') # Construct and send request - request = self._client.delete(url, query_parameters, header_parameters) + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + pause.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/pause'} + + + def _resume_initial( + self, id, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.resume.metadata['url'] + path_format_arguments = { + 'id': self._serialize.url("id", id, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, 
header_parameters) response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200, 202]: - raise models.MachineLearningServiceErrorException(self._deserialize, response) + raise models.ErrorResponseException(self._deserialize, response) if raw: client_raw_response = ClientRawResponse(None, response) header_dict = { - 'Azure-AsyncOperation': 'str', 'Location': 'str', + 'Retry-After': 'int', } client_raw_response.add_headers(header_dict) return client_raw_response - def delete( - self, resource_group_name, workspace_name, compute_name, underlying_resource_action, custom_headers=None, raw=False, polling=True, **operation_config): - """Deletes specified Machine Learning compute. + def resume( + self, id, resource_group_name, workspace_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Resume a labeling job (asynchronous). - :param resource_group_name: Name of the resource group in which - workspace is located. + :param id: The name and identifier for the LabelingJob. + :type id: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. - :type compute_name: str - :param underlying_resource_action: Delete the underlying compute if - 'Delete', or detach the underlying compute from workspace if 'Detach'. 
- Possible values include: 'Delete', 'Detach' - :type underlying_resource_action: str or - ~azure.mgmt.machinelearningservices.models.UnderlyingResourceAction :param dict custom_headers: headers that will be added to the request :param bool raw: The poller return type is ClientRawResponse, the direct response alongside the deserialized response @@ -481,13 +608,12 @@ def delete( :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] :raises: - :class:`MachineLearningServiceErrorException` + :class:`ErrorResponseException` """ - raw_result = self._delete_initial( + raw_result = self._resume_initial( + id=id, resource_group_name=resource_group_name, workspace_name=workspace_name, - compute_name=compute_name, - underlying_resource_action=underlying_resource_action, custom_headers=custom_headers, raw=True, **operation_config @@ -497,147 +623,16 @@ def get_long_running_output(response): if raw: client_raw_response = ClientRawResponse(None, response) client_raw_response.add_headers({ - 'Azure-AsyncOperation': 'str', 'Location': 'str', + 'Retry-After': 'int', }) return client_raw_response lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) - if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} - - def list_nodes( - self, resource_group_name, workspace_name, compute_name, custom_headers=None, raw=False, 
**operation_config): - """Get the details (e.g IP address, port etc) of all the compute nodes in - the compute. - - :param resource_group_name: Name of the resource group in which - workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. - :type compute_name: str - :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :param operation_config: :ref:`Operation configuration - overrides`. - :return: AmlComputeNodesInformation or ClientRawResponse if raw=true - :rtype: - ~azure.mgmt.machinelearningservices.models.AmlComputeNodesInformation - or ~msrest.pipeline.ClientRawResponse - :raises: - :class:`MachineLearningServiceErrorException` - """ - # Construct URL - url = self.list_nodes.metadata['url'] - path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str') - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - - # Construct headers - header_parameters = {} - header_parameters['Accept'] = 'application/json' - if self.config.generate_client_request_id: - header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) - if custom_headers: - header_parameters.update(custom_headers) - if self.config.accept_language is not None: - header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", 
self.config.accept_language, 'str') - - # Construct and send request - request = self._client.post(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) - - if response.status_code not in [200]: - raise models.MachineLearningServiceErrorException(self._deserialize, response) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('AmlComputeNodesInformation', response) - - if raw: - client_raw_response = ClientRawResponse(deserialized, response) - return client_raw_response - - return deserialized - list_nodes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes'} - - def list_keys( - self, resource_group_name, workspace_name, compute_name, custom_headers=None, raw=False, **operation_config): - """Gets secrets related to Machine Learning compute (storage keys, service - credentials, etc). - - :param resource_group_name: Name of the resource group in which - workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. - :type compute_name: str - :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :param operation_config: :ref:`Operation configuration - overrides`. 
- :return: ComputeSecrets or ClientRawResponse if raw=true - :rtype: ~azure.mgmt.machinelearningservices.models.ComputeSecrets or - ~msrest.pipeline.ClientRawResponse - :raises: - :class:`MachineLearningServiceErrorException` - """ - # Construct URL - url = self.list_keys.metadata['url'] - path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), - 'computeName': self._serialize.url("compute_name", compute_name, 'str') - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - - # Construct headers - header_parameters = {} - header_parameters['Accept'] = 'application/json' - if self.config.generate_client_request_id: - header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) - if custom_headers: - header_parameters.update(custom_headers) - if self.config.accept_language is not None: - header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - - # Construct and send request - request = self._client.post(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) - - if response.status_code not in [200]: - raise models.MachineLearningServiceErrorException(self._deserialize, response) - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('ComputeSecrets', response) - - if raw: - client_raw_response = ClientRawResponse(deserialized, response) - return client_raw_response - - return deserialized - list_keys.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys'} + resume.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume'} diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_containers_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_containers_operations.py new file mode 100644 index 000000000000..16990b6a0291 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_containers_operations.py @@ -0,0 +1,316 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse + +from .. import models + + +class ModelContainersOperations(object): + """ModelContainersOperations operations. + + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2021-03-01-preview". 
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2021-03-01-preview" + + self.config = config + + def list( + self, resource_group_name, workspace_name, skip=None, count=None, custom_headers=None, raw=False, **operation_config): + """List model containers. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. + :type skip: str + :param count: Maximum number of results to return. + :type count: int + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of ModelContainerResource + :rtype: + ~azure.mgmt.machinelearningservices.models.ModelContainerResourcePaged[~azure.mgmt.machinelearningservices.models.ModelContainerResource] + :raises: + :class:`ErrorResponseException` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'str') + if count is not None: + query_parameters['count'] = self._serialize.query("count", count, 'int') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise 
models.ErrorResponseException(self._deserialize, response) + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.ModelContainerResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models'} + + def delete( + self, name, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """Delete container. + + :param name: Container name. + :type name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 204]: + raise models.ErrorResponseException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}'} + + def get( + self, name, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """Get container. + + :param name: Container name. 
+ :type name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: ModelContainerResource or ClientRawResponse if raw=true + :rtype: + ~azure.mgmt.machinelearningservices.models.ModelContainerResource or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, 
stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('ModelContainerResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}'} + + def create_or_update( + self, name, resource_group_name, workspace_name, properties, custom_headers=None, raw=False, **operation_config): + """Create or update container. + + :param name: Container name. + :type name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param properties: Additional attributes of the entity. + :type properties: + ~azure.mgmt.machinelearningservices.models.ModelContainer + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: ModelContainerResource or ClientRawResponse if raw=true + :rtype: + ~azure.mgmt.machinelearningservices.models.ModelContainerResource or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + body = models.ModelContainerResource(properties=properties) + + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(body, 'ModelContainerResource') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 201]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + 
if response.status_code == 200: + deserialized = self._deserialize('ModelContainerResource', response) + if response.status_code == 201: + deserialized = self._deserialize('ModelContainerResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}'} diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_versions_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_versions_operations.py new file mode 100644 index 000000000000..eab5dd8027a1 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_model_versions_operations.py @@ -0,0 +1,354 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse + +from .. import models + + +class ModelVersionsOperations(object): + """ModelVersionsOperations operations. + + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. 
+ :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2021-03-01-preview". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2021-03-01-preview" + + self.config = config + + def list( + self, name, resource_group_name, workspace_name, skip=None, order_by=None, top=None, version=None, description=None, offset=None, tags=None, properties=None, custom_headers=None, raw=False, **operation_config): + """List model versions. + + :param name: Model name. + :type name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skip: Continuation token for pagination. + :type skip: str + :param order_by: Ordering of list. + :type order_by: str + :param top: Maximum number of records to return. + :type top: int + :param version: Model version. + :type version: str + :param description: Model description. + :type description: str + :param offset: Number of initial results to skip. + :type offset: int + :param tags: Comma-separated list of tag names (and optionally + values). Example: tag1,tag2=value2 + :type tags: str + :param properties: Comma-separated list of property names (and + optionally values). Example: prop1,prop2=value2 + :type properties: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of ModelVersionResource + :rtype: + ~azure.mgmt.machinelearningservices.models.ModelVersionResourcePaged[~azure.mgmt.machinelearningservices.models.ModelVersionResource] + :raises: + :class:`ErrorResponseException` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'str') + if order_by is not None: + query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int') + if version is not None: + query_parameters['version'] = self._serialize.query("version", version, 'str') + if description is not None: + query_parameters['description'] = self._serialize.query("description", description, 'str') + if offset is not None: + query_parameters['offset'] = self._serialize.query("offset", offset, 'int') + if tags is not None: + query_parameters['tags'] = self._serialize.query("tags", tags, 'str') + if properties is not None: + query_parameters['properties'] = self._serialize.query("properties", properties, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 
'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.ModelVersionResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions'} + + def delete( + self, name, version, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """Delete version. + + :param name: Container name. + :type name: str + :param version: Version identifier. + :type version: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str'), + 'version': self._serialize.url("version", version, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 204]: + raise models.ErrorResponseException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}'} + + def get( + self, name, version, resource_group_name, workspace_name, custom_headers=None, 
raw=False, **operation_config): + """Get version. + + :param name: Container name. + :type name: str + :param version: Version identifier. + :type version: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: ModelVersionResource or ClientRawResponse if raw=true + :rtype: + ~azure.mgmt.machinelearningservices.models.ModelVersionResource or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str'), + 'version': self._serialize.url("version", version, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = 
self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('ModelVersionResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}'} + + def create_or_update( + self, name, version, resource_group_name, workspace_name, properties, custom_headers=None, raw=False, **operation_config): + """Create or update version. + + :param name: Container name. + :type name: str + :param version: Version identifier. + :type version: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param properties: Additional attributes of the entity. + :type properties: + ~azure.mgmt.machinelearningservices.models.ModelVersion + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: ModelVersionResource or ClientRawResponse if raw=true + :rtype: + ~azure.mgmt.machinelearningservices.models.ModelVersionResource or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + body = models.ModelVersionResource(properties=properties) + + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'name': self._serialize.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + 'version': self._serialize.url("version", version, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(body, 'ModelVersionResource') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 201]: + raise 
models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('ModelVersionResource', response) + if response.status_code == 201: + deserialized = self._deserialize('ModelVersionResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}'} diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_deployments_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_deployments_operations.py new file mode 100644 index 000000000000..24dad4e0b6b3 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_deployments_operations.py @@ -0,0 +1,624 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrest.polling import LROPoller, NoPolling +from msrestazure.polling.arm_polling import ARMPolling + +from .. import models + + +class OnlineDeploymentsOperations(object): + """OnlineDeploymentsOperations operations. 
+ + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2021-03-01-preview". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2021-03-01-preview" + + self.config = config + + def list( + self, endpoint_name, resource_group_name, workspace_name, order_by=None, top=None, skip=None, custom_headers=None, raw=False, **operation_config): + """List Inference Endpoint Deployments. + + :param endpoint_name: Inference endpoint name. + :type endpoint_name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param order_by: Ordering of list. + :type order_by: str + :param top: Top of list. + :type top: int + :param skip: Continuation token for pagination. + :type skip: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of OnlineDeploymentTrackedResource + :rtype: + ~azure.mgmt.machinelearningservices.models.OnlineDeploymentTrackedResourcePaged[~azure.mgmt.machinelearningservices.models.OnlineDeploymentTrackedResource] + :raises: + :class:`ErrorResponseException` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list.metadata['url'] + path_format_arguments = { + 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + if order_by is not None: + query_parameters['$orderBy'] = self._serialize.query("order_by", order_by, 'str') + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'int') + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def 
internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.OnlineDeploymentTrackedResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments'} + + + def _delete_initial( + self, endpoint_name, deployment_name, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'), + 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = 
self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202, 204]: + raise models.ErrorResponseException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + header_dict = { + 'x-ms-async-operation-timeout': 'duration', + 'Location': 'str', + 'Retry-After': 'int', + } + client_raw_response.add_headers(header_dict) + return client_raw_response + + def delete( + self, endpoint_name, deployment_name, resource_group_name, workspace_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Delete Inference Endpoint Deployment (asynchronous). + + :param endpoint_name: Inference endpoint name. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. + :type deployment_name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
+ :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns None or + ClientRawResponse if raw==True + :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] + :raises: + :class:`ErrorResponseException` + """ + raw_result = self._delete_initial( + endpoint_name=endpoint_name, + deployment_name=deployment_name, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + if raw: + client_raw_response = ClientRawResponse(None, response) + client_raw_response.add_headers({ + 'x-ms-async-operation-timeout': 'duration', + 'Location': 'str', + 'Retry-After': 'int', + }) + return client_raw_response + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'} + + def get( + self, endpoint_name, deployment_name, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """Get Inference Endpoint Deployment. + + :param endpoint_name: Inference endpoint name.
+ :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. + :type deployment_name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: OnlineDeploymentTrackedResource or ClientRawResponse if + raw=true + :rtype: + ~azure.mgmt.machinelearningservices.models.OnlineDeploymentTrackedResource + or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'), + 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = 
self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('OnlineDeploymentTrackedResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'} + + + def _update_initial( + self, endpoint_name, deployment_name, resource_group_name, workspace_name, body, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.update.metadata['url'] + path_format_arguments = { + 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + 
header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(body, 'PartialOnlineDeploymentPartialTrackedResource') + + # Construct and send request + request = self._client.patch(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + header_dict = {} + + if response.status_code == 200: + deserialized = self._deserialize('OnlineDeploymentTrackedResource', response) + header_dict = { + 'x-ms-async-operation-timeout': 'duration', + 'Location': 'str', + 'Retry-After': 'int', + } + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response.add_headers(header_dict) + return client_raw_response + + return deserialized + + def update( + self, endpoint_name, deployment_name, resource_group_name, workspace_name, body, custom_headers=None, raw=False, polling=True, **operation_config): + """Update Online Deployment (asynchronous). + + :param endpoint_name: Online Endpoint name. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. + :type deployment_name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
+ :type workspace_name: str + :param body: Online Deployment entity to apply during operation. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialOnlineDeploymentPartialTrackedResource + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns + OnlineDeploymentTrackedResource or + ClientRawResponse if raw==True + :rtype: + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.machinelearningservices.models.OnlineDeploymentTrackedResource] + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.machinelearningservices.models.OnlineDeploymentTrackedResource]] + :raises: + :class:`ErrorResponseException` + """ + raw_result = self._update_initial( + endpoint_name=endpoint_name, + deployment_name=deployment_name, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + body=body, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + header_dict = { + 'x-ms-async-operation-timeout': 'duration', + 'Location': 'str', + 'Retry-After': 'int', + } + deserialized = self._deserialize('OnlineDeploymentTrackedResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response.add_headers(header_dict) + return client_raw_response + + return deserialized + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, 
get_long_running_output, polling_method) + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'} + + + def _create_or_update_initial( + self, endpoint_name, deployment_name, resource_group_name, workspace_name, body, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(body, 'OnlineDeploymentTrackedResource') + + # Construct and send 
request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 201]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + header_dict = {} + + if response.status_code == 200: + deserialized = self._deserialize('OnlineDeploymentTrackedResource', response) + header_dict = { + 'x-ms-async-operation-timeout': 'duration', + 'Azure-AsyncOperation': 'str', + } + if response.status_code == 201: + deserialized = self._deserialize('OnlineDeploymentTrackedResource', response) + header_dict = { + 'x-ms-async-operation-timeout': 'duration', + 'Azure-AsyncOperation': 'str', + } + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response.add_headers(header_dict) + return client_raw_response + + return deserialized + + def create_or_update( + self, endpoint_name, deployment_name, resource_group_name, workspace_name, body, custom_headers=None, raw=False, polling=True, **operation_config): + """Create or update Inference Endpoint Deployment (asynchronous). + + :param endpoint_name: Inference endpoint name. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. + :type deployment_name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param body: Inference Endpoint entity to apply during operation. 
+ :type body: + ~azure.mgmt.machinelearningservices.models.OnlineDeploymentTrackedResource + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns + OnlineDeploymentTrackedResource or + ClientRawResponse if raw==True + :rtype: + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.machinelearningservices.models.OnlineDeploymentTrackedResource] + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.machinelearningservices.models.OnlineDeploymentTrackedResource]] + :raises: + :class:`ErrorResponseException` + """ + raw_result = self._create_or_update_initial( + endpoint_name=endpoint_name, + deployment_name=deployment_name, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + body=body, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + header_dict = { + 'x-ms-async-operation-timeout': 'duration', + 'Azure-AsyncOperation': 'str', + } + deserialized = self._deserialize('OnlineDeploymentTrackedResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response.add_headers(header_dict) + return client_raw_response + + return deserialized + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + create_or_update.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}'} + + def get_logs( + self, endpoint_name, deployment_name, resource_group_name, workspace_name, container_type=None, tail=None, custom_headers=None, raw=False, **operation_config): + """Polls an Endpoint operation. + + :param endpoint_name: Inference endpoint name. + :type endpoint_name: str + :param deployment_name: The name and identifier for the endpoint. + :type deployment_name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param container_type: The type of container to retrieve logs from. + Possible values include: 'StorageInitializer', 'InferenceServer' + :type container_type: str or + ~azure.mgmt.machinelearningservices.models.ContainerType + :param tail: The maximum number of lines to tail. + :type tail: int + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: DeploymentLogs or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.machinelearningservices.models.DeploymentLogs or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + body = models.DeploymentLogsRequest(container_type=container_type, tail=tail) + + # Construct URL + url = self.get_logs.metadata['url'] + path_format_arguments = { + 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'), + 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(body, 'DeploymentLogsRequest') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise 
models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('DeploymentLogs', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get_logs.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/getLogs'} diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_endpoints_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_endpoints_operations.py new file mode 100644 index 000000000000..713de3d7c151 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_online_endpoints_operations.py @@ -0,0 +1,790 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrest.polling import LROPoller, NoPolling +from msrestazure.polling.arm_polling import ARMPolling + +from .. import models + + +class OnlineEndpointsOperations(object): + """OnlineEndpointsOperations operations. + + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. 
+ + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2021-03-01-preview". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2021-03-01-preview" + + self.config = config + + def list( + self, resource_group_name, workspace_name, name=None, count=None, compute_type=None, skip=None, tags=None, properties=None, order_by=None, custom_headers=None, raw=False, **operation_config): + """List Online Endpoints. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param name: Name of the endpoint. + :type name: str + :param count: Number of endpoints to be retrieved in a page of + results. + :type count: int + :param compute_type: EndpointComputeType to be filtered by. Possible + values include: 'Managed', 'K8S', 'AzureMLCompute' + :type compute_type: str or + ~azure.mgmt.machinelearningservices.models.EndpointComputeType + :param skip: Continuation token for pagination. + :type skip: str + :param tags: A set of tags with which to filter the returned models. + It is a comma separated string of tags key or tags key=value. Example: + tagKey1,tagKey2,tagKey3=value3 . + :type tags: str + :param properties: A set of properties with which to filter the + returned models. It is a comma separated string of properties key + and/or properties key=value Example: propKey1,propKey2,propKey3=value3 + . + :type properties: str + :param order_by: The option to order the response. 
Possible values + include: 'CreatedAtDesc', 'CreatedAtAsc', 'UpdatedAtDesc', + 'UpdatedAtAsc' + :type order_by: str or + ~azure.mgmt.machinelearningservices.models.OrderString + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of OnlineEndpointTrackedResource + :rtype: + ~azure.mgmt.machinelearningservices.models.OnlineEndpointTrackedResourcePaged[~azure.mgmt.machinelearningservices.models.OnlineEndpointTrackedResource] + :raises: + :class:`ErrorResponseException` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + if name is not None: + query_parameters['name'] = self._serialize.query("name", name, 'str') + if count is not None: + query_parameters['count'] = self._serialize.query("count", count, 'int') + if compute_type is not None: + query_parameters['computeType'] = self._serialize.query("compute_type", compute_type, 'str') + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'str') + if tags is not None: + query_parameters['tags'] = self._serialize.query("tags", tags, 'str') + if properties is not None: + query_parameters['properties'] = self._serialize.query("properties", 
properties, 'str') + if order_by is not None: + query_parameters['orderBy'] = self._serialize.query("order_by", order_by, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.OnlineEndpointTrackedResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints'} + + + def _delete_initial( + self, endpoint_name, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202, 204]: + raise models.ErrorResponseException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + header_dict = { + 'x-ms-async-operation-timeout': 'duration', + 'Location': 'str', + 'Retry-After': 'int', + } + client_raw_response.add_headers(header_dict) + return client_raw_response + + def delete( + self, endpoint_name, resource_group_name, workspace_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Delete Online Endpoint (asynchronous). + + :param endpoint_name: Online Endpoint name. + :type endpoint_name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
+ :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns None or + ClientRawResponse if raw==True + :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] + :raises: + :class:`ErrorResponseException` + """ + raw_result = self._delete_initial( + endpoint_name=endpoint_name, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + if raw: + client_raw_response = ClientRawResponse(None, response) + client_raw_response.add_headers({ + 'x-ms-async-operation-timeout': 'duration', + 'Location': 'str', + 'Retry-After': 'int', + }) + return client_raw_response + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'} + + def get( + self, endpoint_name, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """Get Online Endpoint. + + :param endpoint_name: Online Endpoint name. + :type endpoint_name: str + :param resource_group_name: The name of the resource group. 
The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: OnlineEndpointTrackedResource or ClientRawResponse if + raw=true + :rtype: + ~azure.mgmt.machinelearningservices.models.OnlineEndpointTrackedResource + or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if 
response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('OnlineEndpointTrackedResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'} + + + def _update_initial( + self, endpoint_name, resource_group_name, workspace_name, body, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.update.metadata['url'] + path_format_arguments = { + 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = 
self._serialize.body(body, 'PartialOnlineEndpointPartialTrackedResource') + + # Construct and send request + request = self._client.patch(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + header_dict = {} + + if response.status_code == 200: + deserialized = self._deserialize('OnlineEndpointTrackedResource', response) + header_dict = { + 'x-ms-async-operation-timeout': 'duration', + 'Location': 'str', + 'Retry-After': 'int', + } + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response.add_headers(header_dict) + return client_raw_response + + return deserialized + + def update( + self, endpoint_name, resource_group_name, workspace_name, body, custom_headers=None, raw=False, polling=True, **operation_config): + """Update Online Endpoint (asynchronous). + + :param endpoint_name: Online Endpoint name. + :type endpoint_name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param body: Online Endpoint entity to apply during operation. 
+ :type body: + ~azure.mgmt.machinelearningservices.models.PartialOnlineEndpointPartialTrackedResource + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns + OnlineEndpointTrackedResource or + ClientRawResponse if raw==True + :rtype: + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpointTrackedResource] + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.machinelearningservices.models.OnlineEndpointTrackedResource]] + :raises: + :class:`ErrorResponseException` + """ + raw_result = self._update_initial( + endpoint_name=endpoint_name, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + body=body, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + header_dict = { + 'x-ms-async-operation-timeout': 'duration', + 'Location': 'str', + 'Retry-After': 'int', + } + deserialized = self._deserialize('OnlineEndpointTrackedResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response.add_headers(header_dict) + return client_raw_response + + return deserialized + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + update.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'} + + + def _create_or_update_initial( + self, endpoint_name, resource_group_name, workspace_name, body, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(body, 'OnlineEndpointTrackedResource') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 201]: + raise 
models.ErrorResponseException(self._deserialize, response) + + deserialized = None + header_dict = {} + + if response.status_code == 200: + deserialized = self._deserialize('OnlineEndpointTrackedResource', response) + header_dict = { + 'x-ms-async-operation-timeout': 'duration', + 'Azure-AsyncOperation': 'str', + } + if response.status_code == 201: + deserialized = self._deserialize('OnlineEndpointTrackedResource', response) + header_dict = { + 'x-ms-async-operation-timeout': 'duration', + 'Azure-AsyncOperation': 'str', + } + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response.add_headers(header_dict) + return client_raw_response + + return deserialized + + def create_or_update( + self, endpoint_name, resource_group_name, workspace_name, body, custom_headers=None, raw=False, polling=True, **operation_config): + """Create or update Online Endpoint (asynchronous). + + :param endpoint_name: Online Endpoint name. + :type endpoint_name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param body: Online Endpoint entity to apply during operation. 
+ :type body: + ~azure.mgmt.machinelearningservices.models.OnlineEndpointTrackedResource + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns + OnlineEndpointTrackedResource or + ClientRawResponse if raw==True + :rtype: + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpointTrackedResource] + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.machinelearningservices.models.OnlineEndpointTrackedResource]] + :raises: + :class:`ErrorResponseException` + """ + raw_result = self._create_or_update_initial( + endpoint_name=endpoint_name, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + body=body, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + header_dict = { + 'x-ms-async-operation-timeout': 'duration', + 'Azure-AsyncOperation': 'str', + } + deserialized = self._deserialize('OnlineEndpointTrackedResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + client_raw_response.add_headers(header_dict) + return client_raw_response + + return deserialized + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + create_or_update.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}'} + + def list_keys( + self, endpoint_name, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """List EndpointAuthKeys for an Endpoint using Key-based authentication. + + :param endpoint_name: Online Endpoint name. + :type endpoint_name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: EndpointAuthKeys or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.list_keys.metadata['url'] + path_format_arguments = { + 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + 
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('EndpointAuthKeys', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/listKeys'} + + + def _regenerate_keys_initial( + self, endpoint_name, resource_group_name, workspace_name, key_type, key_value=None, custom_headers=None, raw=False, **operation_config): + body = models.RegenerateEndpointKeysRequest(key_type=key_type, key_value=key_value) + + # Construct URL + url = self.regenerate_keys.metadata['url'] + path_format_arguments = { + 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = 
self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(body, 'RegenerateEndpointKeysRequest') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202]: + raise models.ErrorResponseException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + header_dict = { + 'Location': 'str', + 'Retry-After': 'int', + } + client_raw_response.add_headers(header_dict) + return client_raw_response + + def regenerate_keys( + self, endpoint_name, resource_group_name, workspace_name, key_type, key_value=None, custom_headers=None, raw=False, polling=True, **operation_config): + """Regenerate EndpointAuthKeys for an Endpoint using Key-based + authentication (asynchronous). + + :param endpoint_name: Online Endpoint name. + :type endpoint_name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param key_type: Specification for which type of key to generate. + Primary or Secondary. 
Possible values include: 'Primary', 'Secondary' + :type key_type: str or + ~azure.mgmt.machinelearningservices.models.KeyType + :param key_value: The value the key is set to. + :type key_value: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns None or + ClientRawResponse if raw==True + :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] + :raises: + :class:`ErrorResponseException` + """ + raw_result = self._regenerate_keys_initial( + endpoint_name=endpoint_name, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + key_type=key_type, + key_value=key_value, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + if raw: + client_raw_response = ClientRawResponse(None, response) + client_raw_response.add_headers({ + 'Location': 'str', + 'Retry-After': 'int', + }) + return client_raw_response + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + regenerate_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys'} + + def get_token( + self, endpoint_name, 
resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """Retrieve a valid AAD token for an Endpoint using AMLToken-based + authentication. + + :param endpoint_name: Online Endpoint name. + :type endpoint_name: str + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: EndpointAuthToken or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.machinelearningservices.models.EndpointAuthToken + or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.get_token.metadata['url'] + path_format_arguments = { + 'endpointName': self._serialize.url("endpoint_name", endpoint_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + 
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('EndpointAuthToken', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/token'} diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_operations.py index 7b558ec9d645..84da5bdda4c6 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_operations.py @@ -24,7 +24,7 @@ class Operations(object): :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. - :ivar api_version: Version of Azure Machine Learning resource provider API. Constant value: "2019-05-01". + :ivar api_version: The API version to use for this operation. Constant value: "2021-03-01-preview". 
""" models = models @@ -34,7 +34,7 @@ def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer - self.api_version = "2019-05-01" + self.api_version = "2021-03-01-preview" self.config = config @@ -52,7 +52,7 @@ def list( :rtype: ~azure.mgmt.machinelearningservices.models.OperationPaged[~azure.mgmt.machinelearningservices.models.Operation] :raises: - :class:`MachineLearningServiceErrorException` + :class:`ErrorResponseException` """ def prepare_request(next_link=None): if not next_link: @@ -61,7 +61,7 @@ def prepare_request(next_link=None): # Construct parameters query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) else: url = next_link @@ -87,7 +87,7 @@ def internal_paging(next_link=None): response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: - raise models.MachineLearningServiceErrorException(self._deserialize, response) + raise models.ErrorResponseException(self._deserialize, response) return response diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_endpoint_connections_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_endpoint_connections_operations.py new file mode 100644 index 000000000000..0d684b23a2b7 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_private_endpoint_connections_operations.py @@ -0,0 +1,311 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
class PrivateEndpointConnectionsOperations(object):
    """PrivateEndpointConnectionsOperations operations.

    You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute.

    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    :ivar api_version: The API version to use for this operation. Constant value: "2021-03-01-preview".
    """

    models = models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        # API version is pinned for every request issued by this operation group.
        self.api_version = "2021-03-01-preview"

        self.config = config

    def list(
            self, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config):
        """List all the private endpoint connections associated with the
        workspace.

        :param resource_group_name: The name of the resource group. The name
         is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides`.
        :return: An iterator like instance of PrivateEndpointConnection
        :rtype:
         ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnectionPaged[~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection]
        :raises:
         :class:`ErrorResponseException`
        """
        def prepare_request(next_link=None):
            if next_link:
                # Continuation pages come back as fully-formed URLs.
                url = next_link
                query = {}
            else:
                path_args = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1)
                }
                url = self._client.format_url(self.list.metadata['url'], **path_args)
                query = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)}

            headers = {'Accept': 'application/json'}
            if self.config.generate_client_request_id:
                headers['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                headers.update(custom_headers)
            if self.config.accept_language is not None:
                headers['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

            return self._client.get(url, query, headers)

        def internal_paging(next_link=None):
            response = self._client.send(prepare_request(next_link), stream=False, **operation_config)

            if response.status_code not in [200]:
                raise models.ErrorResponseException(self._deserialize, response)

            return response

        # Raw callers additionally receive response headers through the paged object.
        header_dict = {} if raw else None
        return models.PrivateEndpointConnectionPaged(internal_paging, self._deserialize.dependencies, header_dict)
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections'}

    def get(
            self, resource_group_name, workspace_name, private_endpoint_connection_name, custom_headers=None, raw=False, **operation_config):
        """Gets the specified private endpoint connection associated with the
        workspace.

        :param resource_group_name: The name of the resource group. The name
         is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param private_endpoint_connection_name: The name of the private
         endpoint connection associated with the workspace
        :type private_endpoint_connection_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides`.
        :return: PrivateEndpointConnection or ClientRawResponse if raw=true
        :rtype:
         ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection
         or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`ErrorResponseException`
        """
        path_args = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str')
        }
        url = self._client.format_url(self.get.metadata['url'], **path_args)

        query = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)}

        headers = {'Accept': 'application/json'}
        if self.config.generate_client_request_id:
            headers['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            headers.update(custom_headers)
        if self.config.accept_language is not None:
            headers['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        request = self._client.get(url, query, headers)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200]:
            raise models.ErrorResponseException(self._deserialize, response)

        deserialized = self._deserialize('PrivateEndpointConnection', response) if response.status_code == 200 else None

        if raw:
            return ClientRawResponse(deserialized, response)

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'}

    def create_or_update(
            self, resource_group_name, workspace_name, private_endpoint_connection_name, properties, custom_headers=None, raw=False, **operation_config):
        """Update the state of specified private endpoint connection associated
        with the workspace.

        :param resource_group_name: The name of the resource group. The name
         is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param private_endpoint_connection_name: The name of the private
         endpoint connection associated with the workspace
        :type private_endpoint_connection_name: str
        :param properties: The private endpoint connection properties.
        :type properties:
         ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides`.
        :return: PrivateEndpointConnection or ClientRawResponse if raw=true
        :rtype:
         ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection
         or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`ErrorResponseException`
        """
        path_args = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str')
        }
        url = self._client.format_url(self.create_or_update.metadata['url'], **path_args)

        query = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)}

        headers = {
            'Accept': 'application/json',
            'Content-Type': 'application/json; charset=utf-8',
        }
        if self.config.generate_client_request_id:
            headers['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            headers.update(custom_headers)
        if self.config.accept_language is not None:
            headers['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        body_content = self._serialize.body(properties, 'PrivateEndpointConnection')

        request = self._client.put(url, query, headers, body_content)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200]:
            raise models.ErrorResponseException(self._deserialize, response)

        deserialized = self._deserialize('PrivateEndpointConnection', response) if response.status_code == 200 else None

        if raw:
            return ClientRawResponse(deserialized, response)

        return deserialized
    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'}

    def delete(
            self, resource_group_name, workspace_name, private_endpoint_connection_name, custom_headers=None, raw=False, **operation_config):
        """Deletes the specified private endpoint connection associated with the
        workspace.

        :param resource_group_name: The name of the resource group. The name
         is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param private_endpoint_connection_name: The name of the private
         endpoint connection associated with the workspace
        :type private_endpoint_connection_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides`.
        :return: None or ClientRawResponse if raw=true
        :rtype: None or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`ErrorResponseException`
        """
        path_args = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str')
        }
        url = self._client.format_url(self.delete.metadata['url'], **path_args)

        query = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)}

        # No Accept header: a successful delete carries no body to negotiate.
        headers = {}
        if self.config.generate_client_request_id:
            headers['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            headers.update(custom_headers)
        if self.config.accept_language is not None:
            headers['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        request = self._client.delete(url, query, headers)
        response = self._client.send(request, stream=False, **operation_config)

        # 204 means the connection was already gone; both are success.
        if response.status_code not in [200, 204]:
            raise models.ErrorResponseException(self._deserialize, response)

        if raw:
            return ClientRawResponse(None, response)
    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'}
class PrivateLinkResourcesOperations(object):
    """PrivateLinkResourcesOperations operations.

    You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute.

    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    :ivar api_version: The API version to use for this operation. Constant value: "2021-03-01-preview".
    """

    models = models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        # API version is pinned for every request issued by this operation group.
        self.api_version = "2021-03-01-preview"

        self.config = config

    def list(
            self, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config):
        """Gets the private link resources that need to be created for a
        workspace.

        :param resource_group_name: The name of the resource group. The name
         is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of Azure Machine Learning workspace.
        :type workspace_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides`.
        :return: PrivateLinkResourceListResult or ClientRawResponse if
         raw=true
        :rtype:
         ~azure.mgmt.machinelearningservices.models.PrivateLinkResourceListResult
         or ~msrest.pipeline.ClientRawResponse
        :raises: :class:`CloudError`
        """
        path_args = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str')
        }
        url = self._client.format_url(self.list.metadata['url'], **path_args)

        query = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)}

        headers = {'Accept': 'application/json'}
        if self.config.generate_client_request_id:
            headers['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            headers.update(custom_headers)
        if self.config.accept_language is not None:
            headers['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        request = self._client.get(url, query, headers)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200]:
            # This operation surfaces ARM failures as CloudError, tagged with
            # the service-side request id for support diagnostics.
            error = CloudError(response)
            error.request_id = response.headers.get('x-ms-request-id')
            raise error

        deserialized = self._deserialize('PrivateLinkResourceListResult', response) if response.status_code == 200 else None

        if raw:
            return ClientRawResponse(deserialized, response)

        return deserialized
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources'}
+# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse + +from .. import models + + +class QuotasOperations(object): + """QuotasOperations operations. + + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2021-03-01-preview". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2021-03-01-preview" + + self.config = config + + def update( + self, location, value=None, location1=None, custom_headers=None, raw=False, **operation_config): + """Update quota for each VM family in workspace. + + :param location: The location for update quota is queried. + :type location: str + :param value: The list for update quota. + :type value: + list[~azure.mgmt.machinelearningservices.models.QuotaBaseProperties] + :param location1: Region of workspace quota to be updated. + :type location1: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: UpdateWorkspaceQuotasResult or ClientRawResponse if raw=true + :rtype: + ~azure.mgmt.machinelearningservices.models.UpdateWorkspaceQuotasResult + or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + parameters = models.QuotaUpdateParameters(value=value, location=location1) + + # Construct URL + url = self.update.metadata['url'] + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(parameters, 'QuotaUpdateParameters') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('UpdateWorkspaceQuotasResult', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return 
client_raw_response + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas'} + + def list( + self, location, custom_headers=None, raw=False, **operation_config): + """Gets the currently assigned Workspace Quotas based on VMFamily. + + :param location: The location for which resource usage is queried. + :type location: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of ResourceQuota + :rtype: + ~azure.mgmt.machinelearningservices.models.ResourceQuotaPaged[~azure.mgmt.machinelearningservices.models.ResourceQuota] + :raises: + :class:`ErrorResponseException` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct 
and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.ResourceQuotaPaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/quotas'} diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_usages_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_usages_operations.py index 212fbc700f27..0868c9e2bc5d 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_usages_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_usages_operations.py @@ -25,7 +25,7 @@ class UsagesOperations(object): :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. - :ivar api_version: Version of Azure Machine Learning resource provider API. Constant value: "2019-05-01". + :ivar api_version: The API version to use for this operation. Constant value: "2021-03-01-preview". 
""" models = models @@ -35,7 +35,7 @@ def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer - self.api_version = "2019-05-01" + self.api_version = "2021-03-01-preview" self.config = config @@ -61,14 +61,14 @@ def prepare_request(next_link=None): # Construct URL url = self.list.metadata['url'] path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) else: url = next_link diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_virtual_machine_sizes_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_virtual_machine_sizes_operations.py index afbf3b4fc0b4..8dd480f781a5 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_virtual_machine_sizes_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_virtual_machine_sizes_operations.py @@ -25,7 +25,7 @@ class VirtualMachineSizesOperations(object): :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. - :ivar api_version: Version of Azure Machine Learning resource provider API. 
Constant value: "2019-05-01". + :ivar api_version: The API version to use for this operation. Constant value: "2021-03-01-preview". """ models = models @@ -35,7 +35,7 @@ def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer - self.api_version = "2019-05-01" + self.api_version = "2021-03-01-preview" self.config = config @@ -61,13 +61,13 @@ def list( url = self.list.metadata['url'] path_format_arguments = { 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'), - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1) } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) # Construct headers header_parameters = {} diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_connections_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_connections_operations.py new file mode 100644 index 000000000000..b1c5987fcaa7 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_connections_operations.py @@ -0,0 +1,311 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse + +from .. import models + + +class WorkspaceConnectionsOperations(object): + """WorkspaceConnectionsOperations operations. + + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2021-03-01-preview". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2021-03-01-preview" + + self.config = config + + def list( + self, resource_group_name, workspace_name, target=None, category=None, custom_headers=None, raw=False, **operation_config): + """List all connections under a AML workspace. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param target: Target of the workspace connection. + :type target: str + :param category: Category of the workspace connection. + :type category: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of WorkspaceConnection + :rtype: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPaged[~azure.mgmt.machinelearningservices.models.WorkspaceConnection] + :raises: + :class:`ErrorResponseException` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + if target is not None: + query_parameters['target'] = self._serialize.query("target", target, 'str') + if category is not None: + query_parameters['category'] = self._serialize.query("category", category, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise 
models.ErrorResponseException(self._deserialize, response) + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.WorkspaceConnectionPaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections'} + + def create( + self, resource_group_name, workspace_name, connection_name, parameters, custom_headers=None, raw=False, **operation_config): + """Add a new workspace connection. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection + :type connection_name: str + :param parameters: The object for creating or updating a new workspace + connection + :type parameters: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnection + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: WorkspaceConnection or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnection + or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.create.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'connectionName': self._serialize.url("connection_name", connection_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(parameters, 'WorkspaceConnection') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('WorkspaceConnection', response) 
+ + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} + + def get( + self, resource_group_name, workspace_name, connection_name, custom_headers=None, raw=False, **operation_config): + """Get the detail of a workspace connection. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection + :type connection_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: WorkspaceConnection or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnection + or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'connectionName': self._serialize.url("connection_name", connection_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('WorkspaceConnection', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} + + def delete( + self, resource_group_name, workspace_name, connection_name, custom_headers=None, raw=False, **operation_config): + """Delete a workspace connection. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection + :type connection_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'connectionName': self._serialize.url("connection_name", connection_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if 
self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 204]: + raise models.ErrorResponseException(self._deserialize, response) + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_features_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_features_operations.py new file mode 100644 index 000000000000..ce2cebc540ef --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_features_operations.py @@ -0,0 +1,111 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse + +from .. import models + + +class WorkspaceFeaturesOperations(object): + """WorkspaceFeaturesOperations operations. 
+ + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2021-03-01-preview". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2021-03-01-preview" + + self.config = config + + def list( + self, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """Lists all enabled features for a workspace. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: An iterator like instance of AmlUserFeature + :rtype: + ~azure.mgmt.machinelearningservices.models.AmlUserFeaturePaged[~azure.mgmt.machinelearningservices.models.AmlUserFeature] + :raises: + :class:`ErrorResponseException` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.AmlUserFeaturePaged(internal_paging, 
self._deserialize.dependencies, header_dict) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features'} diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_skus_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_skus_operations.py new file mode 100644 index 000000000000..ba274a190794 --- /dev/null +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspace_skus_operations.py @@ -0,0 +1,104 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse + +from .. import models + + +class WorkspaceSkusOperations(object): + """WorkspaceSkusOperations operations. + + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version to use for this operation. Constant value: "2021-03-01-preview". 
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2021-03-01-preview" + + self.config = config + + def list( + self, custom_headers=None, raw=False, **operation_config): + """Lists all skus with associated features. + + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of WorkspaceSku + :rtype: + ~azure.mgmt.machinelearningservices.models.WorkspaceSkuPaged[~azure.mgmt.machinelearningservices.models.WorkspaceSku] + :raises: + :class:`ErrorResponseException` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1) + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + 
request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.WorkspaceSkuPaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces/skus'} diff --git a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspaces_operations.py b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspaces_operations.py index 27bff19114d8..25609a836860 100644 --- a/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspaces_operations.py +++ b/sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/operations/_workspaces_operations.py @@ -11,6 +11,8 @@ import uuid from msrest.pipeline import ClientRawResponse +from msrest.polling import LROPoller, NoPolling +from msrestazure.polling.arm_polling import ARMPolling from .. import models @@ -24,7 +26,7 @@ class WorkspacesOperations(object): :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. - :ivar api_version: Version of Azure Machine Learning resource provider API. Constant value: "2019-05-01". + :ivar api_version: The API version to use for this operation. Constant value: "2021-03-01-preview". 
""" models = models @@ -34,7 +36,7 @@ def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer - self.api_version = "2019-05-01" + self.api_version = "2021-03-01-preview" self.config = config @@ -42,8 +44,8 @@ def get( self, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): """Gets the properties of the specified machine learning workspace. - :param resource_group_name: Name of the resource group in which - workspace is located. + :param resource_group_name: The name of the resource group. The name + is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str @@ -56,20 +58,20 @@ def get( :rtype: ~azure.mgmt.machinelearningservices.models.Workspace or ~msrest.pipeline.ClientRawResponse :raises: - :class:`MachineLearningServiceErrorException` + :class:`ErrorResponseException` """ # Construct URL url = self.get.metadata['url'] path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) # Construct headers header_parameters = {} @@ -86,7 +88,7 @@ def get( 
response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: - raise models.MachineLearningServiceErrorException(self._deserialize, response) + raise models.ErrorResponseException(self._deserialize, response) deserialized = None if response.status_code == 200: @@ -99,41 +101,21 @@ def get( return deserialized get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} - def create_or_update( - self, resource_group_name, workspace_name, parameters, custom_headers=None, raw=False, **operation_config): - """Creates or updates a workspace with the specified parameters. - :param resource_group_name: Name of the resource group in which - workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param parameters: The parameters for creating or updating a machine - learning workspace. - :type parameters: ~azure.mgmt.machinelearningservices.models.Workspace - :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :param operation_config: :ref:`Operation configuration - overrides`. 
- :return: Workspace or ClientRawResponse if raw=true - :rtype: ~azure.mgmt.machinelearningservices.models.Workspace or - ~msrest.pipeline.ClientRawResponse - :raises: - :class:`MachineLearningServiceErrorException` - """ + def _create_or_update_initial( + self, resource_group_name, workspace_name, parameters, custom_headers=None, raw=False, **operation_config): # Construct URL url = self.create_or_update.metadata['url'] path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) # Construct headers header_parameters = {} @@ -153,10 +135,11 @@ def create_or_update( request = self._client.put(url, query_parameters, header_parameters, body_content) response = self._client.send(request, stream=False, **operation_config) - if response.status_code not in [200, 201]: - raise models.MachineLearningServiceErrorException(self._deserialize, response) + if response.status_code not in [200, 201, 202]: + raise models.ErrorResponseException(self._deserialize, response) deserialized = None + if response.status_code == 200: deserialized = self._deserialize('Workspace', response) if response.status_code == 201: @@ -167,39 +150,75 @@ def create_or_update( return 
client_raw_response return deserialized - create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} - def delete( - self, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): - """Deletes a machine learning workspace. + def create_or_update( + self, resource_group_name, workspace_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config): + """Creates or updates a workspace with the specified parameters. - :param resource_group_name: Name of the resource group in which - workspace is located. + :param resource_group_name: The name of the resource group. The name + is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str + :param parameters: The parameters for creating or updating a machine + learning workspace. + :type parameters: ~azure.mgmt.machinelearningservices.models.Workspace :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :param operation_config: :ref:`Operation configuration - overrides`. 
- :return: None or ClientRawResponse if raw=true - :rtype: None or ~msrest.pipeline.ClientRawResponse + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns Workspace or + ClientRawResponse if raw==True + :rtype: + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.machinelearningservices.models.Workspace] + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.machinelearningservices.models.Workspace]] :raises: - :class:`MachineLearningServiceErrorException` + :class:`ErrorResponseException` """ + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + parameters=parameters, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + deserialized = self._deserialize('Workspace', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} + + + def _delete_initial( + self, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): # Construct URL url = self.delete.metadata['url'] path_format_arguments = { - 
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) # Construct headers header_parameters = {} @@ -214,20 +233,62 @@ def delete( request = self._client.delete(url, query_parameters, header_parameters) response = self._client.send(request, stream=False, **operation_config) - if response.status_code not in [200, 204]: - raise models.MachineLearningServiceErrorException(self._deserialize, response) + if response.status_code not in [200, 202, 204]: + raise models.ErrorResponseException(self._deserialize, response) if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response + + def delete( + self, resource_group_name, workspace_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Deletes a machine learning workspace. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
+ :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns None or + ClientRawResponse if raw==True + :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] + :raises: + :class:`ErrorResponseException` + """ + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} def update( self, resource_group_name, workspace_name, parameters, custom_headers=None, raw=False, **operation_config): """Updates a machine learning workspace with the specified parameters. - :param resource_group_name: Name of the resource group in which - workspace is located. + :param resource_group_name: The name of the resource group. The name + is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. 
:type workspace_name: str @@ -244,20 +305,20 @@ def update( :rtype: ~azure.mgmt.machinelearningservices.models.Workspace or ~msrest.pipeline.ClientRawResponse :raises: - :class:`MachineLearningServiceErrorException` + :class:`ErrorResponseException` """ # Construct URL url = self.update.metadata['url'] path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) # Construct headers header_parameters = {} @@ -278,7 +339,7 @@ def update( response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: - raise models.MachineLearningServiceErrorException(self._deserialize, response) + raise models.ErrorResponseException(self._deserialize, response) deserialized = None if response.status_code == 200: @@ -292,15 +353,15 @@ def update( update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} def list_by_resource_group( - self, resource_group_name, skiptoken=None, custom_headers=None, raw=False, **operation_config): + self, resource_group_name, skip=None, custom_headers=None, raw=False, **operation_config): """Lists 
all the available machine learning workspaces under the specified resource group. - :param resource_group_name: Name of the resource group in which - workspace is located. + :param resource_group_name: The name of the resource group. The name + is case insensitive. :type resource_group_name: str - :param skiptoken: Continuation token for pagination. - :type skiptoken: str + :param skip: Continuation token for pagination. + :type skip: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response @@ -310,23 +371,23 @@ def list_by_resource_group( :rtype: ~azure.mgmt.machinelearningservices.models.WorkspacePaged[~azure.mgmt.machinelearningservices.models.Workspace] :raises: - :class:`MachineLearningServiceErrorException` + :class:`ErrorResponseException` """ def prepare_request(next_link=None): if not next_link: # Construct URL url = self.list_by_resource_group.metadata['url'] path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str') + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1) } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - if skiptoken is not None: - query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'str') else: 
url = next_link @@ -352,7 +413,7 @@ def internal_paging(next_link=None): response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: - raise models.MachineLearningServiceErrorException(self._deserialize, response) + raise models.ErrorResponseException(self._deserialize, response) return response @@ -371,8 +432,8 @@ def list_keys( for the storage account, app insights and password for container registry. - :param resource_group_name: Name of the resource group in which - workspace is located. + :param resource_group_name: The name of the resource group. The name + is case insensitive. :type resource_group_name: str :param workspace_name: Name of Azure Machine Learning workspace. :type workspace_name: str @@ -386,20 +447,20 @@ def list_keys( ~azure.mgmt.machinelearningservices.models.ListWorkspaceKeysResult or ~msrest.pipeline.ClientRawResponse :raises: - :class:`MachineLearningServiceErrorException` + :class:`ErrorResponseException` """ # Construct URL url = self.list_keys.metadata['url'] path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) # Construct headers header_parameters = {} @@ -416,7 +477,7 @@ def 
list_keys( response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: - raise models.MachineLearningServiceErrorException(self._deserialize, response) + raise models.ErrorResponseException(self._deserialize, response) deserialized = None if response.status_code == 200: @@ -429,39 +490,21 @@ def list_keys( return deserialized list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys'} - def resync_keys( - self, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): - """Resync all the keys associated with this workspace. This includes keys - for the storage account, app insights and password for container - registry. - :param resource_group_name: Name of the resource group in which - workspace is located. - :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. - :type workspace_name: str - :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :param operation_config: :ref:`Operation configuration - overrides`. 
- :return: None or ClientRawResponse if raw=true - :rtype: None or ~msrest.pipeline.ClientRawResponse - :raises: - :class:`MachineLearningServiceErrorException` - """ + def _resync_keys_initial( + self, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): # Construct URL url = self.resync_keys.metadata['url'] path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) # Construct headers header_parameters = {} @@ -476,21 +519,65 @@ def resync_keys( request = self._client.post(url, query_parameters, header_parameters) response = self._client.send(request, stream=False, **operation_config) - if response.status_code not in [200]: - raise models.MachineLearningServiceErrorException(self._deserialize, response) + if response.status_code not in [200, 202]: + raise models.ErrorResponseException(self._deserialize, response) if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response + + def resync_keys( + self, resource_group_name, workspace_name, custom_headers=None, raw=False, polling=True, **operation_config): + """Resync all the keys associated with this workspace. 
This includes keys + for the storage account, app insights and password for container + registry. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns None or + ClientRawResponse if raw==True + :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] + :raises: + :class:`ErrorResponseException` + """ + raw_result = self._resync_keys_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) resync_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys'} def list_by_subscription( - self, skiptoken=None, custom_headers=None, raw=False, **operation_config): + self, skip=None, custom_headers=None, raw=False, **operation_config): 
"""Lists all the available machine learning workspaces under the specified subscription. - :param skiptoken: Continuation token for pagination. - :type skiptoken: str + :param skip: Continuation token for pagination. + :type skip: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response @@ -500,22 +587,22 @@ def list_by_subscription( :rtype: ~azure.mgmt.machinelearningservices.models.WorkspacePaged[~azure.mgmt.machinelearningservices.models.Workspace] :raises: - :class:`MachineLearningServiceErrorException` + :class:`ErrorResponseException` """ def prepare_request(next_link=None): if not next_link: # Construct URL url = self.list_by_subscription.metadata['url'] path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1) } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - if skiptoken is not None: - query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str') + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + if skip is not None: + query_parameters['$skip'] = self._serialize.query("skip", skip, 'str') else: url = next_link @@ -541,7 +628,7 @@ def internal_paging(next_link=None): response = self._client.send(request, stream=False, **operation_config) if response.status_code not in [200]: - raise models.MachineLearningServiceErrorException(self._deserialize, response) + raise models.ErrorResponseException(self._deserialize, response) return response @@ -553,3 +640,282 @@ def internal_paging(next_link=None): return deserialized 
list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces'} + + def list_notebook_access_token( + self, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """return notebook access token and refresh token. + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: NotebookAccessTokenResult or ClientRawResponse if raw=true + :rtype: + ~azure.mgmt.machinelearningservices.models.NotebookAccessTokenResult + or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.list_notebook_access_token.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + 
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('NotebookAccessTokenResult', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + list_notebook_access_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookAccessToken'} + + + def _prepare_notebook_initial( + self, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + # Construct URL + url = self.prepare_notebook.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not 
None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 202]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + + if response.status_code == 200: + deserialized = self._deserialize('NotebookResourceInfo', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + def prepare_notebook( + self, resource_group_name, workspace_name, custom_headers=None, raw=False, polling=True, **operation_config): + """ + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
+ :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: The poller return type is ClientRawResponse, the + direct response alongside the deserialized response + :param polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :return: An instance of LROPoller that returns NotebookResourceInfo or + ClientRawResponse if raw==True + :rtype: + ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.machinelearningservices.models.NotebookResourceInfo] + or + ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.machinelearningservices.models.NotebookResourceInfo]] + :raises: + :class:`ErrorResponseException` + """ + raw_result = self._prepare_notebook_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + custom_headers=custom_headers, + raw=True, + **operation_config + ) + + def get_long_running_output(response): + deserialized = self._deserialize('NotebookResourceInfo', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + + lro_delay = operation_config.get( + 'long_running_operation_timeout', + self.config.long_running_operation_timeout) + if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **operation_config) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + prepare_notebook.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} + + def list_storage_account_keys( + self, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """ + + :param resource_group_name: 
The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: ListStorageAccountKeysResult or ClientRawResponse if raw=true + :rtype: + ~azure.mgmt.machinelearningservices.models.ListStorageAccountKeysResult + or ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.list_storage_account_keys.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in 
[200]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('ListStorageAccountKeysResult', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + list_storage_account_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listStorageAccountKeys'} + + def list_notebook_keys( + self, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config): + """ + + :param resource_group_name: The name of the resource group. The name + is case insensitive. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: ListNotebookKeysResult or ClientRawResponse if raw=true + :rtype: + ~azure.mgmt.machinelearningservices.models.ListNotebookKeysResult or + ~msrest.pipeline.ClientRawResponse + :raises: + :class:`ErrorResponseException` + """ + # Construct URL + url = self.list_notebook_keys.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1) + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + raise models.ErrorResponseException(self._deserialize, response) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('ListNotebookKeysResult', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + list_notebook_keys.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys'}